async def __call__(self):
    """Serialize the content object into a JSON-compatible mapping."""
    parent_summary = {}
    parent = self.context.__parent__
    if parent is not None:
        # We render the summary of the parent
        try:
            summarizer = getMultiAdapter(
                (parent, self.request), IResourceSerializeToJsonSummary)
            parent_summary = await summarizer()
        except ComponentLookupError:
            parent_summary = {}
    result = {
        '@id': IAbsoluteURL(self.context, self.request)(),
        '@type': self.context.type_name,
        'parent': parent_summary,
        'creation_date': json_compatible(self.context.creation_date),
        'modification_date': json_compatible(self.context.modification_date),
        'UID': self.context.uuid,
    }
    factory = get_cached_factory(self.context.type_name)
    # Main content schema first, then every behavior schema on top.
    await self.get_schema(factory.schema, self.context, result, False)
    for schema, behavior in await get_all_behaviors(self.context):
        await self.get_schema(schema, behavior, result, True)
    return result
async def __call__(self, include=None, omit=None):
    """Serialize the context (and its behaviors) to a JSON-compatible dict.

    :param include: field names / behavior dotted names
        (``namespace.IBehavior`` or ``namespace.IBehavior.field``) to
        force-include; ``'*'`` disables behavior filtering entirely
    :param omit: behavior dotted names to skip
    """
    # BUG FIX: defaults were mutable lists ([]), which are shared across
    # calls and can be accidentally mutated; normalize from None instead.
    self.include = include if include is not None else []
    self.omit = omit if omit is not None else []
    parent = self.context.__parent__
    if parent is not None:
        # We render the summary of the parent
        try:
            parent_summary = await get_multi_adapter(
                (parent, self.request), IResourceSerializeToJsonSummary)()
        except ComponentLookupError:
            parent_summary = {}
    else:
        parent_summary = {}
    factory = get_cached_factory(self.context.type_name)
    behaviors = []
    for behavior_schema in factory.behaviors or ():
        behaviors.append(behavior_schema.__identifier__)
    result = {
        '@id': IAbsoluteURL(self.context, self.request)(),
        '@type': self.context.type_name,
        '@name': self.context.__name__,
        '@uid': self.context.uuid,
        '@static_behaviors': behaviors,
        'parent': parent_summary,  # should be @parent
        'is_folderish': IFolder.providedBy(self.context),  # eek, should be @folderish?
        'creation_date': json_compatible(self.context.creation_date),
        'modification_date': json_compatible(self.context.modification_date),
        'UID': self.context.uuid,  # should be removed
    }
    main_schema = factory.schema
    await self.get_schema(main_schema, self.context, result, False)
    # include can be one of:
    # - <field name> on content schema
    # - namespace.IBehavior
    # - namespace.IBehavior.field_name
    included_ifaces = [name for name in self.include if '.' in name]
    included_ifaces.extend(
        [name.rsplit('.', 1)[0] for name in self.include if '.' in name])
    for behavior_schema, behavior in await get_all_behaviors(
            self.context, load=False):
        if '*' not in self.include:
            dotted_name = behavior_schema.__identifier__
            if (dotted_name in self.omit or
                    (len(included_ifaces) > 0 and
                     dotted_name not in included_ifaces)):
                # make sure the schema isn't filtered
                continue
            if (not getattr(behavior, 'auto_serialize', True) and
                    dotted_name not in included_ifaces):
                continue
        if IAsyncBehavior.implementedBy(behavior.__class__):
            # providedBy not working here?
            await behavior.load(create=False)
        await self.get_schema(behavior_schema, behavior, result, True)
    return result
async def __call__(self, include=None, omit=None):
    """Serialize the context (and its behaviors) to a JSON-compatible dict.

    :param include: field names / behavior dotted names
        (``namespace.IBehavior`` or ``namespace.IBehavior.field``) to
        force-include
    :param omit: behavior dotted names to skip
    """
    # BUG FIX: defaults were mutable lists ([]), which are shared across
    # calls and can be accidentally mutated; normalize from None instead.
    self.include = include if include is not None else []
    self.omit = omit if omit is not None else []
    parent = self.context.__parent__
    if parent is not None:
        # We render the summary of the parent
        try:
            parent_summary = await get_multi_adapter(
                (parent, self.request), IResourceSerializeToJsonSummary)()
        except ComponentLookupError:
            parent_summary = {}
    else:
        parent_summary = {}
    result = {
        '@id': IAbsoluteURL(self.context, self.request)(),
        '@type': self.context.type_name,
        '@name': self.context.__name__,
        '@uid': self.context.uuid,
        'parent': parent_summary,
        'is_folderish': IFolder.providedBy(self.context),
        'creation_date': json_compatible(self.context.creation_date),
        'modification_date': json_compatible(self.context.modification_date),
        'UID': self.context.uuid,
    }
    factory = get_cached_factory(self.context.type_name)
    main_schema = factory.schema
    await self.get_schema(main_schema, self.context, result, False)
    # include can be one of:
    # - <field name> on content schema
    # - namespace.IBehavior
    # - namespace.IBehavior.field_name
    included_ifaces = [name for name in self.include if '.' in name]
    included_ifaces.extend(
        [name.rsplit('.', 1)[0] for name in self.include if '.' in name])
    for behavior_schema, behavior in await get_all_behaviors(
            self.context, load=False):
        dotted_name = behavior_schema.__identifier__
        if (dotted_name in self.omit or
                (len(included_ifaces) > 0 and
                 dotted_name not in included_ifaces)):
            # make sure the schema isn't filtered
            continue
        if (not getattr(behavior, 'auto_serialize', True) and
                dotted_name not in included_ifaces):
            continue
        if IAsyncBehavior.implementedBy(behavior.__class__):
            # providedBy not working here?
            await behavior.load(create=False)
        await self.get_schema(behavior_schema, behavior, result, True)
    return result
def get_data(self, ob, iface, field_name):
    """Return the JSON-compatible value of ``field_name`` from ``ob``.

    Prefers the bound schema-field accessor; falls back to a plain
    ``getattr`` when the field is missing or its accessor fails.
    """
    try:
        bound = iface[field_name].bind(ob)
        try:
            return json_compatible(bound.get(ob))
        except AttributeError:
            # accessor failed; fall through to getattr below
            pass
    except KeyError:
        # field not present on this schema
        pass
    return json_compatible(getattr(ob, field_name, None))
async def __call__(self):
    """Return a minimal JSON summary (@id and @type) of the context."""
    payload = {
        '@id': IAbsoluteURL(self.context)(),
        '@type': self.context.type_name
    }
    return json_compatible(payload)
def test_vocabulary(dummy_request):
    """json_compatible should turn a SimpleVocabulary into a plain list."""
    from guillotina.schema.vocabulary import SimpleVocabulary
    items = ((u"Foo", "id_foo"), (u"Bar", "id_bar"))
    vocab = SimpleVocabulary.fromItems(items)
    converted = json_compatible(vocab)
    assert type(converted) == list
async def serialize_field(self, context, field, default=None):
    """Serialize one schema field value read from ``context``.

    Falls back to ``getattr`` (with ``default``) when the field accessor
    raises, logging a warning.
    """
    try:
        value = await apply_coroutine(field.get, context)
    except Exception:
        # BUG FIX: the warning referenced `self.field.__name__`, but the
        # field is passed in as a parameter; `self.field` may not exist
        # and would raise AttributeError inside this handler, masking
        # the original failure.
        logger.warning(f'Could not find value for schema field'
                       f'({field.__name__}), falling back to getattr')
        value = getattr(context, field.__name__, default)
    return json_compatible(value)
async def __call__(self):
    """Serialize ``self.value``; fall back to the raw value when it
    cannot be adapted to a JSON-compatible form."""
    try:
        serialized = json_compatible(self.value)
    except (ComponentLookupError, TypeError):
        serialized = self.value
    return {
        'value': serialized
    }
async def test_serialize_cloud_file(dummy_request):
    """A DBFile should serialize to a dict exposing its metadata."""
    from guillotina.test_package import FileContent
    content = create_content(FileContent)
    content.file = DBFile(filename='foobar.json', size=25, md5='foobar')
    serialized = json_compatible(content.file)
    assert serialized['filename'] == 'foobar.json'
    assert serialized['size'] == 25
    assert serialized['md5'] == 'foobar'
async def __call__(self):
    """Serialize one registry value, or the whole registry when no key
    was requested."""
    if self.key is _marker:
        # Root of registry
        self.value = self.request.container_settings
    if IRegistry.providedBy(self.value):
        # Serialize each entry, keeping the raw value when it cannot
        # be adapted.
        result = {}
        for entry_key in self.value.keys():
            raw = self.value[entry_key]
            try:
                result[entry_key] = json_compatible(raw)
            except (ComponentLookupError, TypeError):
                result[entry_key] = raw
    else:
        try:
            result = json_compatible(self.value)
        except (ComponentLookupError, TypeError):
            result = self.value
    return {'value': result}
async def get_registry(context, request):
    """Serialize the whole container registry into ``{'value': {...}}``."""
    settings = request.container_settings
    result = {}
    for key in settings.keys():
        raw = settings[key]
        try:
            result[key] = json_compatible(raw)
        except (ComponentLookupError, TypeError):
            # keep the raw value when no serializer applies
            result[key] = raw
    return {'value': result}
async def get_registry_service(context, request):
    """Return the serialized contents of the container registry."""
    registry = await get_registry(context)
    result = {}
    for key in registry.keys():
        raw = registry[key]
        try:
            result[key] = json_compatible(raw)
        except (ComponentLookupError, TypeError):
            # keep the raw value when no serializer applies
            result[key] = raw
    return {"value": result}
async def __call__(self):
    """Return a JSON summary with id, name, type and uid of the context."""
    ctx = self.context
    return json_compatible({
        '@id': IAbsoluteURL(ctx)(),
        '@name': ctx.__name__,
        '@type': ctx.type_name,
        '@uid': ctx.uuid,
        'UID': ctx.uuid
    })
async def __call__(self):
    """Serialize the content object plus all of its behaviors to JSON.

    Legacy variant: uses ``portal_type`` / ``created`` / ``modified``
    naming, iterates factory-declared behaviors first, then the
    dynamically assigned behaviors recorded on ``__behaviors__``.
    """
    parent = self.context.__parent__
    if parent is not None:
        # We render the summary of the parent
        try:
            parent_summary = await getMultiAdapter(
                (parent, self.request), IResourceSerializeToJsonSummary)()
        except ComponentLookupError:
            # no summary adapter registered for the parent
            parent_summary = {}
    else:
        parent_summary = {}
    result = {
        '@id': IAbsoluteURL(self.context, self.request)(),
        '@type': self.context.portal_type,
        'parent': parent_summary,
        'created': json_compatible(self.context.created),
        'modified': json_compatible(self.context.modified),
        'UID': self.context.uuid,
    }
    factory = get_cached_factory(self.context.portal_type)
    main_schema = factory.schema
    await self.get_schema(main_schema, self.context, result, False)
    # statically declared behaviors from the type factory
    for behavior_schema in factory.behaviors or ():
        behavior = behavior_schema(self.context)
        if IAsyncBehavior.implementedBy(behavior.__class__):
            # providedBy not working here?
            await behavior.load()
        await self.get_schema(behavior_schema, behavior, result, True)
    # dynamically assigned behaviors stored on the object itself
    for dynamic_behavior in self.context.__behaviors__ or ():
        dynamic_behavior_obj = BEHAVIOR_CACHE[dynamic_behavior]
        behavior = dynamic_behavior_obj(self.context)
        if IAsyncBehavior.implementedBy(dynamic_behavior_obj.__class__):
            # providedBy not working here?
            await behavior.load()
        await self.get_schema(dynamic_behavior_obj, behavior, result, True)
    return result
async def get_registry(context, request):
    """Serialize every key of the container registry settings."""
    result = {}
    settings = request.container_settings
    for key in settings.keys():
        try:
            serialized = json_compatible(settings[key])
        except (ComponentLookupError, TypeError):
            # keep the raw value when no serializer applies
            serialized = settings[key]
        result[key] = serialized
    return {
        'value': result
    }
async def serialize_field(self, context, field, default=None):
    """Serialize one schema field value read from ``context``.

    Falls back to ``getattr`` (with ``default``) when the field accessor
    raises; awaits the serialized result when it is a coroutine.
    """
    try:
        value = await apply_coroutine(field.get, context)
    except Exception:
        # BUG FIX: the warning referenced `self.field.__name__`, but the
        # field is passed in as a parameter; `self.field` may not exist
        # and would raise AttributeError inside this handler, masking
        # the original failure.
        logger.warning(f'Could not find value for schema field'
                       f'({field.__name__}), falling back to getattr')
        value = getattr(context, field.__name__, default)
    result = json_compatible(value)
    if asyncio.iscoroutine(result):
        result = await result
    return result
async def __call__(self):
    """Return a JSON summary of the context, including its title."""
    ctx = self.context
    payload = {
        "@id": IAbsoluteURL(ctx)(),
        "@type": ctx.type_name,
        "@name": ctx.__name__,
        "@uid": ctx.uuid,
        "UID": ctx.uuid,
        "title": ctx.title,
    }
    return json_compatible(payload)
async def serialize_field(self, context, field, default=None):
    """Serialize a single field value, tolerating accessor failures.

    Falls back to ``getattr`` with ``default`` when the accessor raises.
    """
    try:
        value = await apply_coroutine(field.get, context)
    except Exception:
        logger.warning(f"Could not find value for schema field"
                       f"({field.__name__}), falling back to getattr")
        value = getattr(context, field.__name__, default)
    serialized = json_compatible(value)
    # the serializer may hand back a coroutine; resolve it before returning
    if asyncio.iscoroutine(serialized):
        serialized = await serialized
    return serialized
def __call__(self):
    """Build the base index payload for the content.

    access_users and access_roles
    """
    content = self.content
    parent = getattr(content, "__parent__", None)
    return {
        "path": get_content_path(content),
        "depth": get_content_depth(content),
        "parent_uuid": getattr(parent, "uuid", None),
        "access_users": get_principals_with_access_content(content),
        "access_roles": get_roles_with_access_content(content),
        "type_name": content.type_name,
        "tid": content.__serial__,
        "modification_date": json_compatible(content.modification_date),
    }
async def __call__(self):
    """Minimal JSON summary using the request-aware object URL."""
    ctx = self.context
    return json_compatible({
        "@id": get_object_url(ctx, self.request),
        "@name": ctx.__name__,
        "@type": ctx.type_name,
        "@uid": ctx.uuid,
    })
async def test_serialize_cloud_file(dummy_request, mock_txn):
    """Stream data through the file manager, then serialize the DBFile."""
    from guillotina.test_package import FileContent, IFileContent
    from guillotina.interfaces import IFileManager
    content = create_content(FileContent)
    content.file = DBFile(filename="foobar.json", md5="foobar")
    manager = get_multi_adapter(
        (content, dummy_request, IFileContent["file"].bind(content)),
        IFileManager)
    await manager.dm.load()
    await manager.file_storage_manager.start(manager.dm)

    async def _data():
        yield b'{"foo": "bar"}'

    await manager.file_storage_manager.append(manager.dm, _data(), 0)
    await manager.file_storage_manager.finish(manager.dm)
    await manager.dm.finish()
    serialized = json_compatible(content.file)
    assert serialized["filename"] == "foobar.json"
    assert serialized["size"] == 14
    assert serialized["md5"] == "foobar"
async def test_serialize_cloud_file(dummy_request, dummy_guillotina):
    """Serialize a DBFile after uploading data inside a mock transaction."""
    txn = mocks.MockTransaction()
    with txn:
        from guillotina.test_package import FileContent, IFileContent
        from guillotina.interfaces import IFileManager
        content = create_content(FileContent)
        content.file = DBFile(filename='foobar.json', md5='foobar')
        manager = get_multi_adapter(
            (content, dummy_request, IFileContent['file'].bind(content)),
            IFileManager)
        await manager.dm.load()
        await manager.file_storage_manager.start(manager.dm)

        async def _data():
            yield b'{"foo": "bar"}'

        await manager.file_storage_manager.append(manager.dm, _data(), 0)
        await manager.file_storage_manager.finish(manager.dm)
        await manager.dm.finish()
        serialized = json_compatible(content.file)
        assert serialized['filename'] == 'foobar.json'
        assert serialized['size'] == 14
        assert serialized['md5'] == 'foobar'
async def test_serialize_cloud_file(dummy_request, dummy_guillotina):
    """Serialize a DBFile after streaming data via the file manager."""
    request = dummy_request
    request._txn = mocks.MockTransaction()
    from guillotina.test_package import FileContent, IFileContent
    from guillotina.interfaces import IFileManager
    content = create_content(FileContent)
    content.file = DBFile(filename='foobar.json', md5='foobar')
    manager = get_multi_adapter(
        (content, request, IFileContent['file'].bind(content)),
        IFileManager)
    await manager.dm.load()
    await manager.file_storage_manager.start(manager.dm)

    async def _data():
        yield b'{"foo": "bar"}'

    await manager.file_storage_manager.append(manager.dm, _data(), 0)
    await manager.file_storage_manager.finish(manager.dm)
    await manager.dm.finish()
    serialized = json_compatible(content.file)
    assert serialized['filename'] == 'foobar.json'
    assert serialized['size'] == 14
    assert serialized['md5'] == 'foobar'
async def test_bucket_list_field(dummy_request, mock_txn):
    """Exercise append/extend/remove on a bucketed list field.

    Buckets hold 10 items each here: 101 appended items should spread
    over 11 annotation buckets.
    """
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request),
                                     IResourceDeserializeFromJson)
    # first append creates bucket 0 with a single entry
    await deserializer.set_schema(ITestSchema, content, {
        "bucket_list": {
            "op": "append",
            "value": {
                "key": "foo",
                "value": "bar"
            }
        }
    }, [])
    assert content.bucket_list.annotations_metadata[0]["len"] == 1
    assert await content.bucket_list.get(content, 0, 0) == {
        "key": "foo",
        "value": "bar"
    }
    assert await content.bucket_list.get(content, 0, 1) is None
    assert await content.bucket_list.get(content, 1, 0) is None
    # append 100 more -> 101 total, 11 buckets (10 full + 1 with one item)
    for _ in range(100):
        await deserializer.set_schema(
            ITestSchema,
            content,
            {
                "bucket_list": {
                    "op": "append",
                    "value": {
                        "key": "foo",
                        "value": "bar"
                    }
                }
            },
            [],
        )
    assert len(content.bucket_list.annotations_metadata) == 11
    assert content.bucket_list.annotations_metadata[0]["len"] == 10
    assert content.bucket_list.annotations_metadata[5]["len"] == 10
    assert content.bucket_list.annotations_metadata[10]["len"] == 1
    # removals only shrink the bucket they hit
    await content.bucket_list.remove(content, 10, 0)
    assert content.bucket_list.annotations_metadata[10]["len"] == 0
    await content.bucket_list.remove(content, 9, 0)
    assert content.bucket_list.annotations_metadata[9]["len"] == 9
    assert len(content.bucket_list) == 99
    # extend with two more items -> 101 total again
    await deserializer.set_schema(
        ITestSchema,
        content,
        {
            "bucket_list": {
                "op": "extend",
                "value": [{
                    "key": "foo",
                    "value": "bar"
                }, {
                    "key": "foo",
                    "value": "bar"
                }],
            }
        },
        [],
    )
    assert len(content.bucket_list) == 101
    assert json_compatible(content.bucket_list) == {"len": 101, "buckets": 11}
    assert len([b async for b in content.bucket_list.iter_buckets(content)
                ]) == 11
    assert len([i async for i in content.bucket_list.iter_items(content)]) == 101
    assert "bucketlist-bucket_list0" in content.__gannotations__
async def test_bucket_list_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() content._p_jar = mocks.MockTransaction() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) await deserializer.set_schema( ITestSchema, content, { 'bucket_list': { 'op': 'append', 'value': { 'key': 'foo', 'value': 'bar' } } }, []) assert content.bucket_list.annotations_metadata[0]['len'] == 1 assert await content.bucket_list.get(content, 0, 0) == { 'key': 'foo', 'value': 'bar' } assert await content.bucket_list.get(content, 0, 1) is None assert await content.bucket_list.get(content, 1, 0) is None for _ in range(100): await deserializer.set_schema( ITestSchema, content, { 'bucket_list': { 'op': 'append', 'value': { 'key': 'foo', 'value': 'bar' } } }, []) assert len(content.bucket_list.annotations_metadata) == 11 assert content.bucket_list.annotations_metadata[0]['len'] == 10 assert content.bucket_list.annotations_metadata[5]['len'] == 10 assert content.bucket_list.annotations_metadata[10]['len'] == 1 await content.bucket_list.remove(content, 10, 0) assert content.bucket_list.annotations_metadata[10]['len'] == 0 await content.bucket_list.remove(content, 9, 0) assert content.bucket_list.annotations_metadata[9]['len'] == 9 assert len(content.bucket_list) == 99 await deserializer.set_schema( ITestSchema, content, { 'bucket_list': { 'op': 'extend', 'value': [{ 'key': 'foo', 'value': 'bar' }, { 'key': 'foo', 'value': 'bar' }] } }, []) assert len(content.bucket_list) == 101 assert json_compatible(content.bucket_list) == { 'len': 101, 'buckets': 11 } assert len([b async for b in content.bucket_list.iter_buckets(content)]) == 11 assert len([i async for i in content.bucket_list.iter_items(content)]) == 101 assert 'bucketlist-bucket_list0' in content.__gannotations__
def serialize(self):
    """Serialize a schema field definition to a JSON-schema-like dict.

    Walks every attribute declared by the field's interfaces, serializing
    each one; nested fields are serialized via their own adapter.
    """
    field = self.get_field()
    result = {"type": self.field_type}
    if self.widget is not None:
        result["widget"] = self.widget
    # caching the field_attributes here improves performance dramatically
    if field.__class__ in FIELDS_CACHE:
        field_attributes = FIELDS_CACHE[field.__class__].copy()
    else:
        field_attributes = {}
        for schema in implementedBy(field.__class__).flattened():
            field_attributes.update(get_fields(schema))
        FIELDS_CACHE[field.__class__] = field_attributes
    for attribute_name in sorted(field_attributes.keys()):
        attribute_field = field_attributes[attribute_name]
        if attribute_name in self.filtered_attributes:
            continue
        element_name = attribute_field.__name__
        attribute_field = attribute_field.bind(field)
        force = element_name in self.forced_fields
        value = attribute_field.get(field)
        # For 'default', 'missing_value' etc, we want to validate against
        # the imported field type itself, not the field type of the
        # attribute
        if (
            element_name in self.field_type_attributes
            or element_name in self.non_validated_field_type_attributes
        ):
            attribute_field = field
        text = None
        if isinstance(value, bytes):
            text = value.decode("utf-8")
        elif isinstance(value, str):
            text = value
        elif IField.providedBy(value):
            # attribute is itself a field: serialize it recursively
            serializer = get_multi_adapter((value, field, self.request),
                                           ISchemaFieldSerializeToJson)
            text = serializer.serialize()
            if "properties" in text:
                text = text["properties"]
        elif value is not None and (force or value != field.missing_value):
            text = json_compatible(value)
        if text:
            if attribute_name == "value_type":
                # JSON-schema naming: value_type becomes "items"
                attribute_name = "items"
            result[attribute_name] = text
    if result["type"] == "object":
        if IJSONField.providedBy(field):
            result.update(field.json_schema)
        if IDict.providedBy(field):
            if "properties" not in result:
                result["properties"] = {}
            if field.value_type:
                field_serializer = get_multi_adapter(
                    (field.value_type, self.schema, self.request),
                    ISchemaFieldSerializeToJson
                )
                result["additionalProperties"] = field_serializer.serialize()
            else:
                result["additionalProperties"] = True
        elif IObject.providedBy(field):
            schema_serializer = get_multi_adapter((field.schema, self.request),
                                                  ISchemaSerializeToJson)
            result["properties"] = schema_serializer.serialize()
    if field.extra_values is not None:
        result.update(field.extra_values)
    return result
async def __call__(self, include=None, omit=None):
    """Serialize the context (and its behaviors) to a JSON-compatible dict.

    :param include: field names / behavior dotted names to force-include;
        ``'*'`` disables behavior filtering entirely
    :param omit: behavior dotted names to skip
    """
    self.include = include or []
    self.omit = omit or []
    parent = self.context.__parent__
    if parent is not None:
        # We render the summary of the parent
        try:
            parent_summary = await get_multi_adapter(
                (parent, self.request), IResourceSerializeToJsonSummary)()
        except ComponentLookupError:
            parent_summary = {}
    else:
        parent_summary = {}
    factory = get_cached_factory(self.context.type_name)
    behaviors = []
    for behavior_schema in factory.behaviors or ():
        behaviors.append(behavior_schema.__identifier__)
    result = {
        "@id": get_object_url(self.context, self.request),
        "@type": self.context.type_name,
        "@name": self.context.__name__,
        "@uid": self.context.uuid,
        "@static_behaviors": behaviors,
        "parent": parent_summary,  # should be @parent
        "is_folderish": IFolder.providedBy(self.context),  # eek, should be @folderish?
        "creation_date": json_compatible(self.context.creation_date),
        "modification_date": json_compatible(self.context.modification_date),
    }
    main_schema = factory.schema
    await self.get_schema(main_schema, self.context, result, False)
    # include can be one of:
    # - <field name> on content schema
    # - namespace.IBehavior
    # - namespace.IBehavior.field_name
    included_ifaces = [name for name in self.include if "." in name]
    included_ifaces.extend(
        [name.rsplit(".", 1)[0] for name in self.include if "." in name])
    for behavior_schema, behavior in await get_all_behaviors(self.context,
                                                             load=False):
        if "*" not in self.include:
            dotted_name = behavior_schema.__identifier__
            if dotted_name in self.omit or (
                    len(included_ifaces) > 0 and
                    dotted_name not in included_ifaces):
                # make sure the schema isn't filtered
                continue
            if not getattr(behavior, "auto_serialize", True) and \
                    dotted_name not in included_ifaces:
                continue
        if IAsyncBehavior.implementedBy(behavior.__class__):
            # providedBy not working here?
            await behavior.load(create=False)
        await self.get_schema(behavior_schema, behavior, result, True)
    # allow registered post-serialize hooks to mutate the result
    for post_serialize_processors in app_settings["post_serialize"]:
        await apply_coroutine(post_serialize_processors, self.context, result)
    return result
async def test_bucket_list_field(dummy_request):
    """Exercise append/extend/remove on a bucketed list field.

    Variant that wires a mock transaction via ``__txn__``.
    """
    login()
    content = create_content()
    content.__txn__ = mocks.MockTransaction()
    deserializer = get_multi_adapter((content, dummy_request),
                                     IResourceDeserializeFromJson)
    # first append creates bucket 0 with a single entry
    await deserializer.set_schema(ITestSchema, content, {
        'bucket_list': {
            'op': 'append',
            'value': {
                'key': 'foo',
                'value': 'bar'
            }
        }
    }, [])
    assert content.bucket_list.annotations_metadata[0]['len'] == 1
    assert await content.bucket_list.get(content, 0, 0) == {
        'key': 'foo',
        'value': 'bar'
    }
    assert await content.bucket_list.get(content, 0, 1) is None
    assert await content.bucket_list.get(content, 1, 0) is None
    # append 100 more -> 101 total, 11 buckets (10 full + 1 with one item)
    for _ in range(100):
        await deserializer.set_schema(
            ITestSchema, content, {
                'bucket_list': {
                    'op': 'append',
                    'value': {
                        'key': 'foo',
                        'value': 'bar'
                    }
                }
            }, [])
    assert len(content.bucket_list.annotations_metadata) == 11
    assert content.bucket_list.annotations_metadata[0]['len'] == 10
    assert content.bucket_list.annotations_metadata[5]['len'] == 10
    assert content.bucket_list.annotations_metadata[10]['len'] == 1
    # removals only shrink the bucket they hit
    await content.bucket_list.remove(content, 10, 0)
    assert content.bucket_list.annotations_metadata[10]['len'] == 0
    await content.bucket_list.remove(content, 9, 0)
    assert content.bucket_list.annotations_metadata[9]['len'] == 9
    assert len(content.bucket_list) == 99
    # extend with two more items -> 101 total again
    await deserializer.set_schema(
        ITestSchema, content, {
            'bucket_list': {
                'op': 'extend',
                'value': [{
                    'key': 'foo',
                    'value': 'bar'
                }, {
                    'key': 'foo',
                    'value': 'bar'
                }]
            }
        }, [])
    assert len(content.bucket_list) == 101
    assert json_compatible(content.bucket_list) == {'len': 101, 'buckets': 11}
    assert len([b async for b in content.bucket_list.iter_buckets(content)
                ]) == 11
    assert len([i async for i in content.bucket_list.iter_items(content)]) == 101
    assert 'bucketlist-bucket_list0' in content.__gannotations__
def serialize(self):
    """Serialize a schema field definition to a JSON-schema-like dict.

    Walks every attribute declared by the field's interfaces, serializing
    each one; nested fields are serialized via their own adapter.
    """
    field = self.get_field()
    result = {'type': self.field_type}
    # caching the field_attributes here improves performance dramatically
    if field.__class__ in FIELDS_CACHE:
        field_attributes = FIELDS_CACHE[field.__class__].copy()
    else:
        field_attributes = {}
        for schema in implementedBy(field.__class__).flattened():
            field_attributes.update(get_fields(schema))
        FIELDS_CACHE[field.__class__] = field_attributes
    for attribute_name in sorted(field_attributes.keys()):
        attribute_field = field_attributes[attribute_name]
        if attribute_name in self.filtered_attributes:
            continue
        element_name = attribute_field.__name__
        attribute_field = attribute_field.bind(field)
        force = (element_name in self.forced_fields)
        value = attribute_field.get(field)
        # For 'default', 'missing_value' etc, we want to validate against
        # the imported field type itself, not the field type of the
        # attribute
        if element_name in self.field_type_attributes or \
                element_name in self.non_validated_field_type_attributes:
            attribute_field = field
        text = None
        if isinstance(value, bytes):
            text = value.decode('utf-8')
        elif isinstance(value, str):
            text = value
        elif IField.providedBy(value):
            # attribute is itself a field: serialize it recursively
            serializer = get_multi_adapter(
                (value, field, self.request), ISchemaFieldSerializeToJson)
            text = serializer.serialize()
            if 'properties' in text:
                text = text['properties']
        elif value is not None and (force or value != field.missing_value):
            text = json_compatible(value)
        if text:
            if attribute_name == 'value_type':
                # JSON-schema naming: value_type becomes "items"
                attribute_name = 'items'
            result[attribute_name] = text
    if result['type'] == 'object':
        if IJSONField.providedBy(field):
            result.update(field.json_schema)
        if IDict.providedBy(field):
            if field.value_type:
                field_serializer = get_multi_adapter(
                    (field.value_type, self.schema, self.request),
                    ISchemaFieldSerializeToJson)
                result['additionalProperties'] = field_serializer.serialize()
            else:
                result['additionalProperties'] = True
        elif IObject.providedBy(field):
            schema_serializer = get_multi_adapter((field.schema, self.request),
                                                  ISchemaSerializeToJson)
            result['properties'] = schema_serializer.serialize()
    if field.extra_values is not None:
        result.update(field.extra_values)
    return result
async def __call__(self):
    """Asynchronously serialize a schema field definition to JSON.

    Same structure as the synchronous ``serialize`` variants, but nested
    field/schema serializers are awaited.
    """
    field = self.get_field()
    result = {'type': self.field_type}
    # caching the field_attributes here improves performance dramatically
    if field.__class__ in FIELDS_CACHE:
        field_attributes = FIELDS_CACHE[field.__class__].copy()
    else:
        field_attributes = {}
        for schema in implementedBy(field.__class__).flattened():
            field_attributes.update(get_fields(schema))
        FIELDS_CACHE[field.__class__] = field_attributes
    for attribute_name in sorted(field_attributes.keys()):
        attribute_field = field_attributes[attribute_name]
        if attribute_name in self.filtered_attributes:
            continue
        element_name = attribute_field.__name__
        attribute_field = attribute_field.bind(field)
        force = (element_name in self.forced_fields)
        value = attribute_field.get(field)
        # For 'default', 'missing_value' etc, we want to validate against
        # the imported field type itself, not the field type of the
        # attribute
        if element_name in self.field_type_attributes or \
                element_name in self.non_validated_field_type_attributes:
            attribute_field = field
        text = None
        if isinstance(value, bytes):
            text = value.decode('utf-8')
        elif isinstance(value, str):
            text = value
        elif IField.providedBy(value):
            # attribute is itself a field: serialize it recursively
            serializer = get_multi_adapter((value, field, self.request),
                                           ISchemaFieldSerializeToJson)
            text = await serializer()
            if 'properties' in text:
                text = text['properties']
        elif value is not None and (force or value != field.missing_value):
            text = json_compatible(value)
        if text:
            if attribute_name == 'value_type':
                # JSON-schema naming: value_type becomes "items"
                attribute_name = 'items'
            result[attribute_name] = text
    if result['type'] == 'object':
        if IJSONField.providedBy(field):
            result['properties'] = field.json_schema
        if IDict.providedBy(field):
            if field.value_type:
                field_serializer = get_multi_adapter(
                    (field.value_type, self.schema, self.request),
                    ISchemaFieldSerializeToJson)
                result['additionalProperties'] = await field_serializer()
            else:
                result['additionalProperties'] = True
        elif IObject.providedBy(field):
            schema_serializer = get_multi_adapter(
                (field.schema, self.request), ISchemaSerializeToJson)
            result['properties'] = await schema_serializer()
    if field.extra_values is not None:
        result.update(field.extra_values)
    return result
async def __call__(self, indexes=None, schemas=None):
    """Collect index and metadata values for the content object.

    :param indexes: optional list of index names to restrict to; when
        None, everything is (re)indexed
    :param schemas: optional iterable of schemas; defaults to all
        schemata of the content
    """
    # For each type
    values = {
        "type_name": self.content.type_name,
        "tid": self.content.__serial__,
        "modification_date": json_compatible(self.content.modification_date),
    }
    if schemas is None:
        schemas = iter_schemata(self.content)
    for schema in schemas:
        try:
            behavior = schema(self.content)
        except TypeError:
            # Content can't adapt to schema, so we don't need to
            # index it anyway. Just continue
            continue
        loaded = False
        for field_name, index_data in merged_tagged_value_dict(
                schema, index.key).items():
            index_name = index_data.get("index_name", field_name)
            if index_name in values or index_name in self.attempts:
                # you can override indexers so we do not want to index
                # the same value more than once
                continue
            self.attempts.append(index_name)
            try:
                # accessors we always reindex since we can't know if updated
                # from the indexes param--they are "fake" like indexes, not fields
                if "accessor" in index_data:
                    if (indexes is None or not index_data.get("fields") or
                            (len(set(index_data.get("fields", [])) &
                                 set(indexes)) > 0)):
                        if not loaded:
                            # behaviors are lazily loaded once per schema
                            await self.load_behavior(behavior)
                            loaded = True
                        values[index_name] = await apply_coroutine(
                            index_data["accessor"], behavior)
                elif (indexes is None or field_name in indexes or
                        isinstance(getattr(type(self.content), field_name,
                                           None), property)):
                    if not loaded:
                        # behaviors are lazily loaded once per schema
                        await self.load_behavior(behavior)
                        loaded = True
                    # in this case, properties are also dynamic so we have to make sure
                    # to allow for them to be reindexed every time.
                    values[index_name] = self.get_data(
                        behavior, schema, field_name)
            except NoIndexField:
                pass
        for metadata_name in merged_tagged_value_list(schema, metadata.key):
            if (indexes is not None and metadata_name not in indexes and
                    not isinstance(getattr(type(self.content), metadata_name,
                                           None), property)):
                # in this case, properties are also dynamic so we have to make sure
                # to allow for them to be reindexed every time.
                continue  # skip
            if not loaded:
                # behaviors are lazily loaded once per schema
                await self.load_behavior(behavior)
                loaded = True
            try:
                values[metadata_name] = self.get_data(
                    behavior, schema, metadata_name)
            except NoIndexField:
                pass
    return values
async def __call__(self):
    """Serialize the wrapped value; return it unchanged when it cannot
    be adapted to a JSON-compatible form."""
    try:
        serialized = json_compatible(self.value)
    except (ComponentLookupError, TypeError):
        serialized = self.value
    return {"value": serialized}
async def test_bucket_dict_field(dummy_request, mock_txn):
    """Exercise assign/update/remove on a bucketed dict field and verify
    global key ordering across buckets."""
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request),
                                     IResourceDeserializeFromJson)
    # single assign creates a first bucket with one entry
    await deserializer.set_schema(ITestSchema, content, {
        "bucket_dict": {
            "op": "assign",
            "value": {
                "key": "foo",
                "value": "bar"
            }
        }
    }, [])
    assert content.bucket_dict.buckets[0]["len"] == 1
    assert await content.bucket_dict.get(content, "foo") == "bar"
    assert await content.bucket_dict.get(content, "bar") is None
    inserted = {"foo": "bar"}
    # assign 100 random keys; bucket layout depends on key distribution
    for idx in range(100):
        key = uuid.uuid4().hex
        inserted[key] = str(idx)
        await deserializer.set_schema(
            ITestSchema,
            content,
            {
                "bucket_dict": {
                    "op": "assign",
                    "value": {
                        "key": key,
                        "value": str(idx)
                    }
                }
            },
            [],
        )
    # number of buckets and sizes of each is random depending on keys
    assert len(content.bucket_dict) == 101
    removed = list(inserted.keys())[-1]
    del inserted[removed]
    await content.bucket_dict.remove(content, removed)
    assert len(content.bucket_dict) == 100
    # batch-update two existing keys
    one = list(inserted.keys())[-1]
    two = list(inserted.keys())[-2]
    await deserializer.set_schema(
        ITestSchema,
        content,
        {
            "bucket_dict": {
                "op": "update",
                "value": [{
                    "key": one,
                    "value": "1"
                }, {
                    "key": two,
                    "value": "2"
                }]
            }
        },
        [],
    )
    inserted[one] = "1"
    inserted[two] = "2"
    assert len(content.bucket_dict) == 100
    assert await content.bucket_dict.get(content, one) == "1"
    assert await content.bucket_dict.get(content, two) == "2"
    assert json_compatible(content.bucket_dict)["len"] == 100
    assert len(content.bucket_dict.buckets) == len([
        name for name in content.__gannotations__.keys()
        if name.startswith("bucketdict-")
    ])
    # test everything completely sorted
    all_keys = []
    all_values = []
    for bucket in content.bucket_dict.buckets:
        annotation = await content.bucket_dict.get_annotation(
            content, anno_id=bucket["id"])
        assert annotation["keys"] == sorted(annotation["keys"])
        all_keys.extend(annotation["keys"])
        all_values.extend(annotation["values"])
    assert all_keys == sorted(all_keys)
    assert all_keys == sorted(inserted.keys())
    # check all values as well
    for idx, key in enumerate(all_keys):
        assert inserted[key] == all_values[idx]
async def __call__(self):
    """Asynchronously serialize ``self.field``'s definition to JSON.

    Legacy variant: reads the field from ``self.field`` (no
    ``get_field()`` helper) and uses ``getFields``.
    """
    result = {'type': self.field_type}
    # caching the field_attributes here improves performance dramatically
    if self.field.__class__ in FIELDS_CACHE:
        field_attributes = FIELDS_CACHE[self.field.__class__].copy()
    else:
        field_attributes = {}
        for schema in implementedBy(self.field.__class__).flattened():
            field_attributes.update(getFields(schema))
        FIELDS_CACHE[self.field.__class__] = field_attributes
    for attribute_name in sorted(field_attributes.keys()):
        attribute_field = field_attributes[attribute_name]
        if attribute_name in self.filtered_attributes:
            continue
        element_name = attribute_field.__name__
        attribute_field = attribute_field.bind(self.field)
        force = (element_name in self.forced_fields)
        value = attribute_field.get(self.field)
        # For 'default', 'missing_value' etc, we want to validate against
        # the imported field type itself, not the field type of the
        # attribute
        if element_name in self.field_type_attributes or \
                element_name in self.non_validated_field_type_attributes:
            attribute_field = self.field
        text = None
        if isinstance(value, bytes):
            text = value.decode('utf-8')
        elif isinstance(value, str):
            text = value
        elif IField.providedBy(value):
            # attribute is itself a field: serialize it recursively
            serializer = get_multi_adapter(
                (value, self.field, self.request),
                ISchemaFieldSerializeToJson)
            text = await serializer()
        elif value is not None and (force or
                                    value != self.field.missing_value):
            text = json_compatible(value)
        # handle i18n
        # if isinstance(value, Message):
        #     child.set(ns('domain', I18N_NAMESPACE), value.domain)
        #     if not value.default:
        #         child.set(ns('translate', I18N_NAMESPACE), '')
        #     else:
        #         child.set(ns('translate', I18N_NAMESPACE), child.text)
        #         child.text = converter.toUnicode(value.default)
        if text:
            if attribute_name == 'value_type':
                # JSON-schema naming: value_type becomes "items"
                attribute_name = 'items'
            result[attribute_name] = text
    if result['type'] == 'object':
        if IJSONField.providedBy(self.field):
            result['properties'] = self.field.json_schema
        else:
            schema_serializer = get_multi_adapter(
                (self.field.schema, self.request),
                ISchemaSerializeToJson)
            result['properties'] = await schema_serializer()
    return result
async def __call__(self):
    """Fetch the value asynchronously and return its JSON-compatible form."""
    value = await self.get_value()
    return json_compatible(value)