async def runit(type_name):
    """Benchmark serialization of one content object of *type_name*.

    Creates the object, deserializes a fixed payload into it once, then
    times ITERATIONS adapter lookups + serializations.
    """
    print(f'Test content serialization with {type_name}')
    request = get_current_request()
    txn = mocks.MockTransaction()
    ob = await create_content(type_name, id='foobar')
    request._db_id = 'foobar'
    ob._p_jar = txn
    deserializer = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    # payload exercises the dublincore behavior plus three test behaviors
    data = {
        'title': 'Foobar',
        'guillotina.behaviors.dublincore.IDublinCore': {
            'tags': ['foo', 'bar']
        },
        'measures.configuration.ITestBehavior1': {
            'foobar': '123'
        },
        'measures.configuration.ITestBehavior2': {
            'foobar': '123'
        },
        'measures.configuration.ITestBehavior3': {
            'foobar': '123'
        }
    }
    await deserializer(data, validate_all=True)
    start = time.time()
    for _ in range(ITERATIONS):
        serializer = get_multi_adapter((ob, request), IResourceSerializeToJson)
        await serializer()
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
async def default_get(context, request):
    """We show the available schemas.

    Returns a mapping with the statically-configured behaviors, the
    dynamically-enabled ones, the behaviors still available for this
    context, and the serialized schema of every relevant behavior.
    """
    result = {}
    factory = get_cached_factory(context.type_name)
    result['static'] = []
    for schema in factory.behaviors or ():
        result['static'].append(schema.__identifier__)
    # convert to list, could be frozenset
    result['dynamic'] = [b for b in context.__behaviors__]
    result['available'] = []
    # NOTE: the original body looked up the factory a second time and
    # duplicated the schema-serialization code in both branches; the
    # behavior is identical with a single serialization path.
    for name, utility in get_utilities_for(IBehavior):
        if name not in result['dynamic'] and name not in result['static']:
            # Not enabled: only report (and serialize) behaviors the
            # context can actually be adapted to.
            adaptable = query_adapter(
                context, utility.interface, name='', default=None)
            if not adaptable:
                continue
            result['available'].append(name)
        schema_serializer = get_multi_adapter(
            (utility.interface, request), ISchemaSerializeToJson)
        result[name] = await schema_serializer()
    return result
async def run2():
    """Benchmark the bare multi-adapter lookup for the deserializer."""
    content = test_utils.create_content()
    request = test_utils.get_mocked_request()
    begin = time.time()
    print('Test multi adapter')
    for _ in range(ITERATIONS):
        get_multi_adapter((content, request), IResourceDeserializeFromJson)
    elapsed = time.time() - begin
    print(f'Done with {ITERATIONS} in {elapsed} seconds')
async def test_serialize_resource_omit_field(dummy_request):
    """A dotted field listed in omit= must be absent from the behavior data."""
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request), IResourceSerializeToJson)
    data = await adapter(
        omit=['guillotina.behaviors.dublincore.IDublinCore.creators'])
    assert 'creators' not in data['guillotina.behaviors.dublincore.IDublinCore']
async def test_serialize_resource(dummy_request):
    """Default serialization includes the dublincore behavior section."""
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request), IResourceSerializeToJson)
    data = await adapter()
    assert 'guillotina.behaviors.dublincore.IDublinCore' in data
async def runit(type_name):
    """Benchmark content creation + deserialization for *type_name*.

    Each iteration creates an object, fires the before-add event,
    deserializes a fixed payload and fires the added event.
    """
    print(f'Test content creation with {type_name}')
    request = get_current_request()
    txn = mocks.MockTransaction()
    container = await create_content(type_name, id='container')
    container._p_jar = txn
    start = time.time()
    for _ in range(ITERATIONS):
        ob = await create_content(type_name, id='foobar')
        ob._p_jar = txn
        await notify(BeforeObjectAddedEvent(ob, container, 'foobar'))
        deserializer = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
        data = {
            'title': 'Foobar',
            'guillotina.behaviors.dublincore.IDublinCore': {
                'tags': ['foo', 'bar']
            },
            'measures.configuration.ITestBehavior1': {
                'foobar': '123'
            },
            'measures.configuration.ITestBehavior2': {
                'foobar': '123'
            },
            'measures.configuration.ITestBehavior3': {
                'foobar': '123'
            }
        }
        await deserializer(data, validate_all=True)
        await notify(ObjectAddedEvent(ob, container, 'foobar', payload=data))
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
async def test_delete_by_value_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) errors = [] await deserializer.set_schema( ITestSchema, content, { 'patch_list_int': [1, 2] }, errors) assert errors == [] assert getattr(content, 'patch_list_int', []) == [1, 2] await deserializer.set_schema( ITestSchema, content, { 'patch_list_int': { 'op': 'remove', 'value': 2 } }, errors) assert errors == [] assert getattr(content, 'patch_list_int', []) == [1] await deserializer.set_schema( ITestSchema, content, { 'patch_list_int': { 'op': 'remove', 'value': 99 } }, errors) assert len(errors) == 1 assert errors[0]['field'] == 'patch_list_int'
async def test_dates_bucket_list_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() content._p_jar = mocks.MockTransaction() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) await deserializer.set_schema( ITestSchema, content, { 'datetime_bucket_list': { 'op': 'append', 'value': '2018-06-05T12:35:30.865745+00:00' } }, []) assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 1 await deserializer.set_schema( ITestSchema, content, { 'datetime_bucket_list': { 'op': 'extend', 'value': [ '2019-06-05T12:35:30.865745+00:00', '2020-06-05T12:35:30.865745+00:00' ] } }, []) assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 3
async def test_check_permission_deserialize_content(dummy_request):
    """check_permission grants ViewContent; the second call hits the cache."""
    request = dummy_request  # noqa
    login(request)
    ob = create_content()
    adapter = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    for _ in range(2):  # second iteration exercises the permission cache
        assert adapter.check_permission('guillotina.ViewContent')
async def get_all_types(context, request):
    """Serialize every registered resource factory to JSON."""
    serialized = []
    for _, factory in get_utilities_for(IResourceFactory):
        serializer = get_multi_adapter((factory, request), IFactorySerializeToJson)
        serialized.append(await serializer())
    return serialized
async def test_patchfield_notdefined_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) errors = [] await deserializer.set_schema( ITestSchema, content, { "dict_of_obj": { "key1": { "foo": "bar", "bar": 1, # Value not found in schema "not_defined_field": "arbitrary-value" } }, "patch_dict_of_obj": { "key1": { "foo": "bar", "bar": 1, # Value not found in schema "not_defined_field": "arbitrary-value" } } }, errors) assert len(errors) == 0 # 'not_defined_field' is not part of INestFieldSchema so should not serialized and stored assert 'not_defined_field' not in content.dict_of_obj['key1'] assert 'not_defined_field' not in content.patch_dict_of_obj['key1'] await deserializer.set_schema( ITestSchema, content, { "patch_dict_of_obj": { "op": "assign", "value": { "key": "key1", "value": { "op": "append", "value": { "foo": "bar", "bar": 1, # Value not found in schema "not_defined_field": "arbitrary-value" } } } } }, errors) assert len(errors) == 0 assert 'not_defined_field' not in content.dict_of_obj['key1'] assert 'not_defined_field' not in content.patch_dict_of_obj['key1']
async def __call__(self):
    """Build a JSON-schema document describing the factory's type.

    Serializes the base schema's fields, its invariants, and each
    behavior as a definition referenced from ``properties`` via $ref.
    """
    factory = self.factory
    result = {
        'title': factory.type_name,
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'required': [],
        'definitions': {},
        'properties': {}
    }
    # Base object class serialized
    for name, field in get_fields_in_order(factory.schema):
        if field.required:
            result['required'].append(name)
        serializer = get_multi_adapter(
            (field, factory.schema, self.request),
            ISchemaFieldSerializeToJson)
        result['properties'][name] = await serializer()
    invariants = []
    for i in factory.schema.queryTaggedValue('invariants', []):
        invariants.append("%s.%s" % (i.__module__, i.__name__))
    result['invariants'] = invariants
    # Behavior serialization
    for schema in factory.behaviors or ():
        schema_serializer = get_multi_adapter(
            (schema, self.request), ISchemaSerializeToJson)
        serialization = await schema_serializer()
        # BUG FIX: a stray trailing comma previously turned this entry
        # into a 1-tuple containing the dict; the $ref must be a dict.
        result['properties'][schema_serializer.name] = \
            {'$ref': '#/definitions/' + schema_serializer.name}
        result['definitions'][schema_serializer.name] = serialization
        behavior = get_utility(IBehavior, name=schema_serializer.name)
        definition = result['definitions'][schema_serializer.name]
        definition['title'] = behavior.title or schema_serializer.short_name
        definition['description'] = behavior.description
    return result
async def test_patch_int_field_normal_path(dummy_request):
    """A plain integer payload assigns directly to a patch-int field."""
    request = dummy_request  # noqa
    login(request)
    ob = create_content()
    adapter = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    await adapter.set_schema(ITestSchema, ob, {'patch_int': 2}, [])
    assert ob.patch_int == 2
async def test_serialize_omit_main_interface_field(dummy_request):
    """omit= also filters fields that live on the content's main interface."""
    from guillotina.test_package import FileContent
    ob = create_content(FileContent, type_name='File')
    ob.file = DBFile(filename='foobar.json', size=25, md5='foobar')
    adapter = get_multi_adapter((ob, dummy_request), IResourceSerializeToJson)
    assert 'file' not in await adapter(omit=['file'])
    assert 'file' in await adapter()
def serialize(self):
    """Fill in properties/required/invariants on schema_json and return it."""
    for field_name, field in get_fields_in_order(self.schema):
        field_serializer = get_multi_adapter(
            (field, self.schema, self.request), ISchemaFieldSerializeToJson)
        self.schema_json['properties'][field_name] = field_serializer.serialize()
        if field.required:
            self.schema_json['required'].append(field_name)
    self.schema_json['invariants'] = self.invariants
    return self.schema_json
async def test_serialize_resource_include_field(dummy_request):
    """include= restricts the behavior payload to exactly the named fields."""
    from guillotina.test_package import FileContent
    ob = create_content(FileContent, type_name='File')
    ob.file = DBFile(filename='foobar.json', size=25, md5='foobar')
    adapter = get_multi_adapter((ob, dummy_request), IResourceSerializeToJson)
    data = await adapter(
        include=['guillotina.behaviors.dublincore.IDublinCore.creators'])
    dublincore = data['guillotina.behaviors.dublincore.IDublinCore']
    assert 'creators' in dublincore
    assert len(dublincore) == 1
    assert 'file' not in data
def __init__(self, context, request, field):
    """Resolve the configured cloud storage manager for *field*."""
    self.context = context
    self.request = request
    self.field = field
    storage_iface = import_class(app_settings['cloud_storage'])
    alsoProvides(field, storage_iface)
    self.file_storage_manager = get_multi_adapter(
        (context, request, field), IFileStorageManager)
    self.dm = get_adapter(self.file_storage_manager, IUploadDataManager)
def convert_interfaces_to_schema(interfaces):
    """Map each interface's identifier to its serialized JSON schema."""
    try:
        request = get_current_request()
    except RequestNotFound:
        # outside a request cycle (e.g. CLI) fall back to a mocked request
        from guillotina.tests.utils import get_mocked_request
        request = get_mocked_request()
    schemas = {}
    for iface in interfaces:
        serializer = get_multi_adapter((iface, request), ISchemaSerializeToJson)
        schemas[iface.__identifier__] = serializer.serialize()
    return schemas
async def test_copy_file_ob(container_requester):
    """Copying a file between managers must allocate a new blob id."""
    async with container_requester as requester:
        # create an Item with the attachment behavior enabled
        _, status = await requester(
            'POST', '/db/guillotina/',
            data=json.dumps({
                '@type': 'Item',
                '@behaviors': [IAttachment.__identifier__],
                'id': 'foobar'
            })
        )
        assert status == 201
        # upload 4MB of data to the file field
        response, status = await requester(
            'PATCH', '/db/guillotina/foobar/@upload/file',
            data=b'X' * 1024 * 1024 * 4,
            headers={
                'x-upload-size': str(1024 * 1024 * 4)
            }
        )
        assert status == 200
        request = utils.get_mocked_request(requester.db)
        root = await utils.get_root(request)
        async with managed_transaction(request=request, abort_when_done=True):
            container = await root.async_get('guillotina')
            obj = await container.async_get('foobar')
            attachment = IAttachment(obj)
            await attachment.load()
            existing_bid = attachment.file._blob.bid
            cfm = get_multi_adapter(
                (obj, request, IAttachment['file'].bind(attachment)),
                IFileManager
            )
            from_cfm = get_multi_adapter(
                (obj, request, IAttachment['file'].bind(attachment)),
                IFileManager
            )
            await cfm.copy(from_cfm)
            # the copy must not reuse the source's blob
            assert existing_bid != attachment.file._blob.bid
async def test_unhandled_exceptions_in_bucket_dict_field_do_not_write_to_object(
        dummy_request, mock_txn):
    """A failing 'assign' on a bucket dict must not leave partial state."""
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)
    errors = []
    # 'value': None is invalid for assign; the failure is collected as a
    # single error and the attribute must never be written to the object.
    await deserializer.set_schema(
        ITestSchema, content,
        {"bucket_dict": {
            "op": "assign",
            "value": None
        }}, errors)
    assert not hasattr(content, "bucket_dict")
    assert len(errors) == 1
def __init__(self, context, request, field):
    """Wire up the storage manager and its configured data manager."""
    self.context = context
    self.request = request
    self.field = field
    storage_iface = import_class(app_settings['cloud_storage'])
    alsoProvides(field, storage_iface)
    self.file_storage_manager = get_multi_adapter(
        (context, request, field), IFileStorageManager)
    dm_name = app_settings.get('cloud_datamanager') or 'db'
    self.dm = get_adapter(
        self.file_storage_manager, IUploadDataManager, name=dm_name)
def __init__(self, context, request, field):
    """Bind the cloud storage interface and resolve the data manager."""
    self.context = context
    self.request = request
    self.field = field
    storage_iface = resolve_dotted_name(app_settings["cloud_storage"])
    alsoProvides(field, storage_iface)
    self.file_storage_manager = get_multi_adapter(
        (context, request, field), IFileStorageManager)
    dm_name = app_settings.get("cloud_datamanager") or "db"
    self.dm = get_adapter(
        self.file_storage_manager, IUploadDataManager, name=dm_name)
def convert_interfaces_to_schema(interfaces):
    """Return {identifier: serialized schema} for every given interface."""
    try:
        request = get_current_request()
    except RequestNotFound:
        # no active request (e.g. scripts/tests): use a mocked one
        from guillotina.tests.utils import get_mocked_request
        request = get_mocked_request()
    return {
        iface.__identifier__: get_multi_adapter(
            (iface, request), ISchemaSerializeToJson).serialize()
        for iface in interfaces
    }
async def __call__(self):
    """Serialize self.context, honouring ?include= and ?omit= query params."""
    serializer = get_multi_adapter(
        (self.context, self.request), IResourceSerializeToJson)
    # Bind the two lists independently: the original `include = omit = []`
    # aliased one list object, a latent mutation hazard.
    include = []
    omit = []
    if self.request.query.get('include'):
        include = self.request.query.get('include').split(',')
    if self.request.query.get('omit'):
        omit = self.request.query.get('omit').split(',')
    try:
        result = await serializer(include=include, omit=omit)
    except TypeError:
        # serializer implementations that do not accept the filtering
        # keyword arguments fall back to a plain serialization
        result = await serializer()
    await notify(ObjectVisitedEvent(self.context))
    return result
async def test_constraint_error(dummy_request, mock_txn):
    """Constraint failures surface as exactly one deserialization error."""
    login()
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request), IResourceDeserializeFromJson)
    for payload, expected_errors in (
            ({"constrained": "foobar"}, 1),
            ({"constrained": "not foobar"}, 0)):
        errors = []
        await adapter.set_schema(ITestValidation, ob, payload, errors)
        assert len(errors) == expected_errors
async def test_fhir_field_schema_serializer(dummy_request):
    """FhirField serializes with its type name and resource_class."""
    field = FhirField(
        title="Organization resource",
        resource_class="fhir.resources.organization.Organization",
    )
    serializer = get_multi_adapter(
        (field, IOrganization, dummy_request), ISchemaFieldSerializeToJson)
    result = await serializer()
    assert result['type'] == 'FhirField'
    assert result['resource_class'] == 'fhir.resources.organization.Organization'
async def __call__(self):
    """Scale the stored image to a configured size and return it.

    The scale name comes from the route match; it must be one of the
    registry's allowed_sizes ("WIDTH:HEIGHT" strings).
    """
    registry = await get_registry()
    settings = registry.for_interface(IImagingSettings)
    scale_name = self.request.matchdict["scale"]
    allowed_sizes = settings["allowed_sizes"]
    if scale_name not in allowed_sizes:
        raise HTTPNotFound(
            content={"reason": f"{scale_name} is not supported"})
    file = self.field.get(self.field.context or self.context)
    if file is None:
        raise HTTPNotFound(
            content={
                "message": "File or custom filename required to download"
            })
    adapter = get_multi_adapter((self.context, self.request, self.field),
                                IFileManager)
    # read the whole blob into memory before scaling
    data = b""
    async for chunk in adapter.iter_data():
        data += chunk
    width, _, height = allowed_sizes[scale_name].partition(":")
    result, format_, size = scaleImage(
        data,
        int(width),
        int(height),
        quality=settings["quality"],
        direction="thumbnail",
    )
    cors_renderer = app_settings["cors_renderer"](self.request)
    headers = await cors_renderer.get_headers()
    headers.update({
        "CONTENT-DISPOSITION":
            'attachment; filename="{}"'.format(file.filename)
    })
    download_resp = Response(
        status=200,
        headers=headers,
        content_type=f"image/{format_}",
        content_length=len(result),
    )
    await download_resp.prepare(self.request)
    await download_resp.write(result)
    await download_resp.write(eof=True)
    return download_resp
async def start(self, dm):
    """Init a resumable upload session against Google Cloud Storage.

    upload_file_id : temporary object name of the image being uploaded
    resumable_uri : URI of the resumable upload session
    """
    util = get_utility(IGCloudBlobStore)
    request = get_current_request()
    upload_file_id = dm.get("upload_file_id")
    if upload_file_id is not None:
        # a previous upload was left behind; discard it first
        await self.delete_upload(upload_file_id)
    generator = get_multi_adapter((self.context, self.field),
                                  IFileNameGenerator)
    upload_file_id = await apply_coroutine(generator)
    init_url = "{}&name={}".format(
        UPLOAD_URL.format(bucket=await util.get_bucket_name()),
        quote_plus(upload_file_id),
    )
    creator = get_authenticated_user_id()
    metadata = json.dumps({
        "CREATOR": creator,
        "REQUEST": str(request),
        "NAME": dm.get("filename")
    })
    call_size = len(metadata)
    async with util.session.post(
        init_url,
        headers={
            "AUTHORIZATION": "Bearer {}".format(await util.get_access_token()),
            "X-Upload-Content-Type": to_str(dm.content_type),
            "X-Upload-Content-Length": str(dm.size),
            "Content-Type": "application/json; charset=UTF-8",
            "Content-Length": str(call_size),
        },
        data=metadata,
    ) as call:
        if call.status != 200:
            text = await call.text()
            raise GoogleCloudException(f"{call.status}: {text}")
        # GCS returns the session URI in the Location header
        resumable_uri = call.headers["Location"]
    await dm.update(current_upload=0,
                    resumable_uri=resumable_uri,
                    upload_file_id=upload_file_id)
async def __call__(self):
    """Build a JSON-schema document describing the factory's type."""
    factory = self.factory
    result = {
        'title': factory.type_name,
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'required': [],
        'definitions': {},
        'properties': {}
    }
    # Base object class serialized
    for name, field in getFieldsInOrder(factory.schema):
        if field.required:
            result['required'].append(name)
        serializer = get_multi_adapter(
            (field, factory.schema, self.request),
            ISchemaFieldSerializeToJson)
        result['properties'][name] = await serializer()
    invariants = []
    for i in factory.schema.queryTaggedValue('invariants', []):
        invariants.append("%s.%s" % (i.__module__, i.__name__))
    result['invariants'] = invariants
    # Behavior serialization
    for schema in factory.behaviors or ():
        schema_serializer = get_multi_adapter(
            (schema, self.request), ISchemaSerializeToJson)
        serialization = await schema_serializer()
        # BUG FIX: a stray trailing comma previously made this entry a
        # 1-tuple wrapping the dict; the $ref must be a plain dict.
        result['properties'][schema_serializer.name] = \
            {'$ref': '#/definitions/' + schema_serializer.name}
        result['definitions'][schema_serializer.name] = serialization
    return result
async def __call__(self):
    """Dispatch the upload to the IFileManager adapter for this field.

    The upload is read as async IO and saved in chunks by the adapter.
    """
    kwargs = {}
    matchdict = self.request.matchdict
    if 'filename' in matchdict:
        kwargs['filename'] = matchdict['filename']
    try:
        manager = get_multi_adapter(
            (self.context, self.request, self.field), IFileManager)
        return await self.handle(manager, kwargs)
    except AttributeError:
        # file does not exist
        return HTTPNotFound(content={
            'reason': 'File does not exist'
        })
async def test_patch_int_field_invalid_type(dummy_request):
    """inc/dec/reset must reject a float value on an int patch field."""
    login()
    content = create_content()
    adapter = get_multi_adapter((content, dummy_request),
                                IResourceDeserializeFromJson)
    for operation in ("inc", "dec", "reset"):
        errors = []
        payload = {"patch_int": {"op": operation, "value": 3.3}}
        await adapter.set_schema(ITestSchema, content, payload, errors)
        assert getattr(content, "patch_int", 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]["error"], WrongType)
async def test_patch_int_field_invalid_type(dummy_request):
    """A float is a WrongType for every int patch operation."""
    login()
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request),
                                IResourceDeserializeFromJson)
    for operation in ('inc', 'dec', 'reset'):
        errors = []
        await adapter.set_schema(
            ITestSchema, ob,
            {'patch_int': {'op': operation, 'value': 3.3}}, errors)
        assert getattr(ob, 'patch_int', 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]['error'], WrongType)
async def test_copy_file_ob(container_requester):
    """A file copy must allocate a new blob rather than share the old one."""
    async with container_requester as requester:
        # create an Item with the attachment behavior
        _, status = await requester('POST', '/db/guillotina/',
                                    data=json.dumps({
                                        '@type': 'Item',
                                        '@behaviors': [IAttachment.__identifier__],
                                        'id': 'foobar'
                                    }))
        assert status == 201
        # upload 4MB into the file field
        response, status = await requester(
            'PATCH', '/db/guillotina/foobar/@upload/file',
            data=b'X' * 1024 * 1024 * 4,
            headers={'x-upload-size': str(1024 * 1024 * 4)})
        assert status == 200
        request = utils.get_mocked_request(requester.db)
        root = await utils.get_root(request)
        async with managed_transaction(request=request, abort_when_done=True):
            container = await root.async_get('guillotina')
            obj = await container.async_get('foobar')
            attachment = IAttachment(obj)
            await attachment.load()
            existing_bid = attachment.file._blob.bid
            cfm = get_multi_adapter(
                (obj, request, IAttachment['file'].bind(attachment)),
                IFileManager)
            from_cfm = get_multi_adapter(
                (obj, request, IAttachment['file'].bind(attachment)),
                IFileManager)
            await cfm.copy(from_cfm)
            # the blob id must change after the copy
            assert existing_bid != attachment.file._blob.bid
async def copy(self, to_storage_manager, to_dm):
    """Copy the stored tmp file to a freshly generated uri in *to_dm*.

    Fixes two defects in the original: a dead assignment of the source
    tmp path to the new uri (immediately overwritten), and a leaked file
    descriptor from ``tempfile.mkstemp()[1]``.
    """
    import os  # local import: only needed to close the mkstemp descriptor
    file = self.field.get(self.field.context or self.context)
    generator = get_multi_adapter((self.context, self.field),
                                  IFileNameGenerator)
    new_uri = await apply_coroutine(generator)
    # mkstemp returns (fd, path); close the fd so it does not leak
    fd, tmp_path = tempfile.mkstemp()
    os.close(fd)
    _tmp_files[new_uri] = tmp_path
    copyfile(_tmp_files[file.uri], _tmp_files[new_uri])
    await to_dm.finish(
        values={
            "content_type": file.content_type,
            "size": file.size,
            "uri": new_uri,
            "filename": file.filename or "unknown",
        })
async def test_patch_list_field_invalid_type(dummy_request):
    """Appending a non-conforming value to a patch list is an error."""
    login()
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request),
                                IResourceDeserializeFromJson)
    errors = []
    payload = {'patch_list': {'op': 'append', 'value': 1}}
    await adapter.set_schema(ITestSchema, ob, payload, errors)
    assert len(getattr(ob, 'patch_list', [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], ValueDeserializationError)
async def test_patch_list_field_invalid_type(dummy_request, mock_txn):
    """An int cannot be appended where the list expects another type."""
    login()
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request),
                                IResourceDeserializeFromJson)
    errors = []
    await adapter.set_schema(
        ITestSchema, ob,
        {"patch_list": {"op": "append", "value": 1}}, errors)
    assert len(getattr(ob, "patch_list", [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]["error"], ValueDeserializationError)
async def get_field_value(context, request):
    """Render the value of a single (possibly behavior) field on *context*.

    A dotted name selects a behavior field; a plain name selects a field
    on the type's main schema. Returns 404 when the schema, field, or
    behavior cannot be resolved and 401 when the caller lacks the field's
    read permission.
    """
    field_name = request.matchdict["dotted_name"]
    if "." in field_name:
        # behavior field lookup
        iface_dotted = ".".join(field_name.split(".")[:-1])
        field_name = field_name.split(".")[-1]
        try:
            schema = resolve_dotted_name(iface_dotted)
        except ModuleNotFoundError:
            # BUG FIX: message previously read "Could resolve"
            return HTTPNotFound(content={"reason": f"Could not resolve: {iface_dotted}"})
        try:
            field = schema[field_name]
        except KeyError:
            return HTTPNotFound(content={"reason": f"No field: {field_name}"})
        try:
            behavior = await get_behavior(context, schema)
        except AttributeError:
            return HTTPNotFound(content={"reason": f"Could not load behavior: {iface_dotted}"})
        if behavior is None:
            return HTTPNotFound(content={"reason": f"Not valid behavior: {iface_dotted}"})
        field = field.bind(behavior)
        field_context = behavior
    else:
        # main object field
        factory = get_cached_factory(context.type_name)
        schema = factory.schema
        try:
            field = schema[field_name]
        except KeyError:
            return HTTPNotFound(content={"reason": f"No field: {field_name}"})
        field = field.bind(context)
        field_context = context

    # check permission
    read_permissions = merged_tagged_value_dict(schema, read_permission.key)
    serializer = get_multi_adapter((context, request), IResourceSerializeToJson)
    if not serializer.check_permission(read_permissions.get(field_name)):
        return HTTPUnauthorized(content={"reason": "You are not authorized to render this field"})

    # a custom renderer for the field takes precedence over default serialization
    field_renderer = query_multi_adapter((context, request, field), IFieldValueRenderer)
    if field_renderer is None:
        return await serializer.serialize_field(field_context, field)
    else:
        return await field_renderer()
async def runit(type_name):
    """Benchmark deserialization of a large payload for *type_name*.

    Builds a payload with ~2700 tags and repeated dublincore dates, then
    times ITERATIONS full deserializations with validation.
    """
    print(f"Test content serialization with {type_name}")
    request = get_mocked_request()
    txn = mocks.MockTransaction()
    ob = await create_content(type_name, id="foobar")
    ob.__txn__ = txn
    deserializer = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    # all 1- and 2-letter combinations of ascii letters
    tags = []
    for l1 in string.ascii_letters:
        tags.append(l1)
        for l2 in string.ascii_letters:
            tags.append(l1 + l2)
            # for l3 in string.ascii_letters:
            #     tags.append(l1 + l2 + l3)
    print(f"{len(tags)} tags")
    data = {
        "title": "Foobar",
        "guillotina.behaviors.dublincore.IDublinCore": {
            "tags": tags,
            "creation_date": "2020-01-02T19:07:48.748922Z",
            "effective_date": "2020-01-02T19:07:48.748922Z",
            "expiration_date": "2020-01-02T19:07:48.748922Z",
            "creators": [
                "".join(i) for i in zip(string.ascii_letters, string.ascii_letters)
            ],
        },
        "measures.configuration.ITestBehavior1": {
            "foobar": "123"
        },
        "measures.configuration.ITestBehavior2": {
            "foobar": "123"
        },
        "measures.configuration.ITestBehavior3": {
            "foobar": "123"
        },
    }
    start = time.time()
    for _ in range(ITERATIONS):
        await deserializer(data, validate_all=True)
    end = time.time()
    print(f"Done with {ITERATIONS} in {end - start} seconds")
async def __call__(self):
    """Scale the stored image to a configured size and stream it back.

    The scale name comes from the route match and must be one of the
    container's allowed_sizes ("WIDTH:HEIGHT" strings).
    """
    settings = self.request.container_settings.for_interface(
        IImagingSettings)
    scale_name = self.request.matchdict['scale']
    allowed_sizes = settings['allowed_sizes']
    if scale_name not in allowed_sizes:
        raise HTTPNotFound(
            content={'reason': f'{scale_name} is not supported'})
    file = self.field.get(self.field.context or self.context)
    if file is None:
        raise HTTPNotFound(
            content={
                'message': 'File or custom filename required to download'
            })
    adapter = get_multi_adapter((self.context, self.request, self.field),
                                IFileManager)
    # read the whole blob into memory before scaling
    data = b''
    async for chunk in adapter.iter_data():
        data += chunk
    width, _, height = allowed_sizes[scale_name].partition(':')
    result, format_, size = scaleImage(data,
                                       int(width),
                                       int(height),
                                       quality=settings['quality'],
                                       direction='thumbnail')
    cors_renderer = app_settings['cors_renderer'](self.request)
    headers = await cors_renderer.get_headers()
    headers.update({
        'CONTENT-DISPOSITION':
            'attachment; filename="{}"'.format(file.filename)
    })
    download_resp = StreamResponse(headers=headers)
    download_resp.content_type = f'image/{format_}'
    if file.size:
        download_resp.content_length = len(result)
    await download_resp.prepare(self.request)
    await download_resp.write(result)
    await download_resp.drain()
    await download_resp.write_eof()
    return download_resp
async def test_validator_error(dummy_request, mock_txn):
    """Custom validator rejects a bad payload and accepts a clean one."""
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request),
                                     IResourceDeserializeFromJson)
    errors = []
    # an extra 'foo' key trips the validator -> one error
    await deserializer.set_schema(ITestValidation, content, {
        "validated_text": "foo",
        "foo": "foo"
    }, errors)
    assert len(errors) == 1
    errors = []
    # a clean payload on fresh content validates without errors
    content = create_content()
    await deserializer.set_schema(ITestValidation, content,
                                  {"validated_text": "foo"}, errors)
    assert len(errors) == 0
async def test_patch_list_field_invalid_type(dummy_request):
    """Appending an invalid value to patch_list registers exactly one error."""
    request = dummy_request  # noqa
    login(request)
    ob = create_content()
    adapter = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    errors = []
    await adapter.set_schema(
        ITestSchema, ob,
        {'patch_list': {'op': 'append', 'value': 1}}, errors)
    assert len(getattr(ob, 'patch_list', [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], ValueDeserializationError)
async def test_patch_int_field_invalid_type(dummy_request):
    """inc/dec/reset refuse a float value on an int patch field."""
    request = dummy_request  # noqa
    login(request)
    ob = create_content()
    adapter = get_multi_adapter((ob, request), IResourceDeserializeFromJson)
    for operation in ('inc', 'dec', 'reset'):
        errors = []
        await adapter.set_schema(
            ITestSchema, ob,
            {'patch_int': {'op': operation, 'value': 3.3}}, errors)
        assert getattr(ob, 'patch_int', 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]['error'], WrongType)
async def create(self, payload: dict, in_: IResource = None) -> IResource:
    """POST *payload* into *in_* (default: the db) and return the new object.

    Commits the transaction and then navigates to the Location returned
    by the POST view.
    """
    await self.get_transaction()
    if in_ is None:
        in_ = self.db
    view = get_multi_adapter((in_, self.request),
                             app_settings["http_methods"]["POST"],
                             name="")

    # the view reads the body via request.json(); stub it with the payload
    async def json():
        return payload

    self.request.json = json
    resp = await view()
    await self.commit()
    path = resp.headers["Location"]
    if path.startswith("http://") or path.startswith("https://"):
        # strip off container prefix
        container_url = get_object_url(in_, self.request)  # type: ignore
        path = path[len(container_url or "") :]
    return await navigate_to(in_, path.strip("/"))  # type: ignore
async def test_patch_dict_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'assign', 'value': { 'key': 'foo', 'value': 'bar' } } }, []) assert len(content.patch_dict) == 1 assert content.patch_dict['foo'] == 'bar' await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'assign', 'value': { 'key': 'foo2', 'value': 'bar2' } } }, []) assert len(content.patch_dict) == 2 assert content.patch_dict['foo2'] == 'bar2' await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'del', 'value': 'foo2' } }, []) assert len(content.patch_dict) == 1 assert 'foo2' not in content.patch_dict
async def test_patch_dict_field(dummy_request): request = dummy_request # noqa login(request) content = create_content() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'assign', 'value': { 'key': 'foo', 'value': 'bar' } } }, []) assert len(content.patch_dict) == 1 assert content.patch_dict['foo'] == 'bar' await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'assign', 'value': { 'key': 'foo2', 'value': 'bar2' } } }, []) assert len(content.patch_dict) == 2 assert content.patch_dict['foo2'] == 'bar2' await deserializer.set_schema( ITestSchema, content, { 'patch_dict': { 'op': 'del', 'value': 'foo2' } }, []) assert len(content.patch_dict) == 1 assert 'foo2' not in content.patch_dict
async def create(self, payload: dict, in_: IResource=None) -> IResource:
    """POST *payload* into *in_* (default: the db) and return the new object.

    Commits, then navigates to the path from the POST response's Location.
    """
    await self.get_transaction()
    if in_ is None:
        in_ = self.db
    view = get_multi_adapter(
        (in_, self.request), app_settings['http_methods']['POST'],
        name='')

    # the view reads the body through request.json(); stub it in
    async def json():
        return payload

    self.request.json = json
    resp = await view()
    await self.commit()
    path = resp.headers['Location']
    if path.startswith('http://') or path.startswith('https://'):
        # strip off container prefix
        container_url = get_object_url(in_, self.request)  # type: ignore
        path = path[len(container_url or ''):]
    return await navigate_to(in_, path.strip('/'))  # type: ignore
async def test_patch_dict_field_invalid_type(dummy_request):
    """A non-string key on a patch-dict assign produces a WrongType error."""
    login()
    ob = create_content()
    adapter = get_multi_adapter((ob, dummy_request),
                                IResourceDeserializeFromJson)
    errors = []
    payload = {'patch_dict': {
        'op': 'assign',
        'value': {'key': 1, 'value': 'bar2'}
    }}
    await adapter.set_schema(ITestSchema, ob, payload, errors)
    assert len(getattr(ob, 'patch_dict', {})) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], WrongType)
async def copy(self, to_storage_manager, to_dm):
    """Server-side copy of the stored GCS object to a freshly generated uri.

    Uses the GCS copyTo API so no data passes through this process, then
    records the new object's metadata on *to_dm*.
    """
    file = self.field.get(self.field.context or self.context)
    if not _is_uploaded_file(file):
        raise HTTPNotFound(
            content={"reason": "To copy a uri must be set on the object"})
    generator = get_multi_adapter((self.context, self.field),
                                  IFileNameGenerator)
    new_uri = await apply_coroutine(generator)
    util = get_utility(IGCloudBlobStore)
    bucket_name = await util.get_bucket_name()
    url = "{}/{}/o/{}/copyTo/b/{}/o/{}".format(
        OBJECT_BASE_URL,
        bucket_name,
        quote_plus(file.uri),
        bucket_name,
        quote_plus(new_uri),
    )
    async with util.session.post(
        url,
        headers={
            "AUTHORIZATION": "Bearer {}".format(await util.get_access_token()),
            "Content-Type": "application/json",
        },
    ) as resp:
        if resp.status == 404:
            text = await resp.text()
            reason = (
                f"Could not copy file: {file.uri} to {new_uri}:404: {text}"  # noqa
            )
            log.warning(reason)
            raise HTTPNotFound(content={"reason": reason})
        else:
            data = await resp.json()
            assert data["name"] == new_uri
            await to_dm.finish(
                values={
                    "content_type": data["contentType"],
                    "size": int(data["size"]),
                    "uri": new_uri,
                    "filename": file.filename or "unknown",
                })
async def test_patch_dict_field_invalid_type(dummy_request, mock_txn):
    """A wrongly-typed key must be rejected with a single WrongType error."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)
    errors = []
    body = {"patch_dict": {"op": "assign",
                           "value": {"key": 1, "value": "bar2"}}}
    await deserializer.set_schema(ITestSchema, content, body, errors)
    # the int key violates the schema, so nothing is stored
    assert len(getattr(content, "patch_dict", {})) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]["error"], WrongType)
def test_all(dummy_request):
    """Every schema field type resolves to its expected serializer adapter."""
    expectations = (
        (schema.Object(schema=IResource),
         serialize_schema_field.DefaultSchemaFieldSerializer),
        (schema.Text(),
         serialize_schema_field.DefaultTextSchemaFieldSerializer),
        (schema.TextLine(),
         serialize_schema_field.DefaultTextLineSchemaFieldSerializer),
        (schema.Float(),
         serialize_schema_field.DefaultFloatSchemaFieldSerializer),
        (schema.Int(),
         serialize_schema_field.DefaultIntSchemaFieldSerializer),
        (schema.Bool(),
         serialize_schema_field.DefaultBoolSchemaFieldSerializer),
        (schema.List(),
         serialize_schema_field.DefaultCollectionSchemaFieldSerializer),
        (schema.Choice(values=("one", "two")),
         serialize_schema_field.DefaultChoiceSchemaFieldSerializer),
        (schema.Object(schema=IResource),
         serialize_schema_field.DefaultObjectSchemaFieldSerializer),
        (schema.Date(),
         serialize_schema_field.DefaultDateSchemaFieldSerializer),
        (schema.Time(),
         serialize_schema_field.DefaultTimeSchemaFieldSerializer),
        (schema.Dict(),
         serialize_schema_field.DefaultDictSchemaFieldSerializer),
        (schema.Datetime(),
         serialize_schema_field.DefaultDateTimeSchemaFieldSerializer),
    )
    container = Container()
    for field, expected_class in expectations:
        adapter = get_multi_adapter(
            (field, container, dummy_request),
            interface=ISchemaFieldSerializeToJson)
        assert isinstance(adapter, expected_class)
async def test_patch_dict_field_invalid_type(dummy_request):
    """Wrong key type: the assign op is dropped and a WrongType error recorded."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema,
        content,
        {'patch_dict': {'op': 'assign',
                        'value': {'key': 1, 'value': 'bar2'}}},
        errors,
    )
    assert len(getattr(content, 'patch_dict', {})) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], WrongType)
async def controlpanel_element(context, request):
    """Serialize one controlpanel: its JSON schema plus current registry values.

    Returns ``None`` (original early-return behavior) when the requested
    controlpanel has no schema configured; otherwise returns a dict with
    ``@id``, ``group``, ``title``, ``schema`` and ``data``.
    """
    url = getMultiAdapter((context, request), IAbsoluteURL)()
    type_id = request.matchdict["type_id"]
    registry = await get_registry()
    result = {
        "@id": f"{url}/@controlpanels/{type_id}",
        "group": "General",
        "title": "Validations Settings",
        "data": {},
    }
    controlpanels = app_settings.get("controlpanels", {})
    if type_id in controlpanels:
        dotted = controlpanels[type_id].get("schema", None)
        if dotted is None:
            return
        schema_iface = resolve_dotted_name(dotted)
        config = registry.for_interface(schema_iface)
        schema = {"properties": {}, "fieldsets": [], "required": []}
        data = {}
        fields = []
        for name, field in get_fields_in_order(schema_iface):
            if field.required:
                # BUG FIX: required names belong to the serialized schema;
                # the top-level result dict has no "required" key, so the
                # original `result["required"].append(name)` raised KeyError.
                schema["required"].append(name)
            serializer = get_multi_adapter(
                (field, schema_iface, request), ISchemaFieldSerializeToJson)
            schema["properties"][name] = await serializer()
            # current value stored in the registry for this field
            data[name] = config[name]
            fields.append(name)
        schema["fieldsets"] = [{
            "fields": fields,
            "id": "default",
            "title": "default"
        }]
        result["schema"] = schema
        result["data"] = data
    return result
async def test_bucket_dict_field_splitting(dummy_request, mock_txn):
    """Sequential inserts share one bucket; an out-of-range key splits it."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)

    def assign(key, value):
        # shorthand for a single bucket_dict assign payload
        return {"bucket_dict": {"op": "assign",
                                "value": {"key": key, "value": value}}}

    for idx in range(10):
        await deserializer.set_schema(
            ITestSchema, content, assign(str(idx), str(idx)), [])
    # all ten writes land in a single bucket
    assert len(mock_txn.added) == 1
    mock_txn.added.clear()

    # a key sorting past every existing entry forces a split into a new bucket
    await deserializer.set_schema(
        ITestSchema, content, assign("ZZZZZ", "foobar"), [])
    # one write to the existing bucket plus one to the fresh bucket
    assert len(mock_txn.added) == 2
async def deserialize_cloud_field(field, value, context):
    """Deserialize a payload (metadata dict or base64 string) into a file field."""
    # behaviors keep the data on the behavior object but adapt on the content
    data_context = context
    if IContentBehavior.implementedBy(context.__class__):
        field = field.bind(context)
        context = context.context
    else:
        field = field.bind(context)

    if isinstance(value, dict):
        try:
            existing = field.get(data_context)
        except AttributeError:
            existing = None
        if existing is not None:
            # mutate the stored file object's metadata in place
            for key, item_value in value.items():
                if key in serialize_mappings:
                    setattr(existing, serialize_mappings[key], item_value)
            data_context.register()
            if "data" not in value:
                # metadata-only update: nothing left to upload
                return existing
            value["data"] = base64.b64decode(value["data"])
    else:
        # plain base64 web value -> normalize into the dict form
        value = convert_base64_to_binary(value)

    # upload the payload bytes through the registered file manager
    request = get_current_request()
    file_manager = get_multi_adapter((context, request, field), IFileManager)
    content_type = value.get("content_type", value.get("content-type"))
    filename = value.get("filename", None)
    return await file_manager.save_file(
        partial(_generator, value),
        content_type=content_type,
        size=len(value["data"]),
        filename=filename)
async def deserialize_cloud_field(field, value, context):
    """Turn a base64 string or metadata dict into the stored cloud file value."""
    data_context = context
    # bind against the behavior but adapt file managers on the content object
    if IContentBehavior.implementedBy(context.__class__):
        field = field.bind(context)
        context = context.context
    else:
        field = field.bind(context)

    if not isinstance(value, dict):
        # bare base64 web value -> expand into the dict representation
        value = convert_base64_to_binary(value)
    else:
        try:
            current = field.get(data_context)
        except AttributeError:
            current = None
        if current is not None:
            # update metadata on the already-stored file object
            for attr, attr_value in value.items():
                if attr in serialize_mappings:
                    setattr(current, serialize_mappings[attr], attr_value)
            data_context._p_register()
            if 'data' in value:
                value['data'] = base64.b64decode(value['data'])
            else:
                # no payload: the metadata update is all there is to do
                return current

    request = get_current_request()
    file_manager = get_multi_adapter((context, request, field), IFileManager)
    return await file_manager.save_file(
        partial(_generator, value),
        content_type=value.get('content_type', value.get('content-type')),
        size=len(value['data']),
        filename=value.get('filename', None))
async def test_serialize_cloud_file(dummy_request, dummy_guillotina):
    """Uploading through the file manager yields the expected serialized value."""
    request = dummy_request
    request._txn = mocks.MockTransaction()
    from guillotina.test_package import FileContent, IFileContent
    from guillotina.interfaces import IFileManager

    obj = create_content(FileContent)
    obj.file = DBFile(filename='foobar.json', md5='foobar')
    fm = get_multi_adapter(
        (obj, request, IFileContent['file'].bind(obj)), IFileManager)
    await fm.dm.load()

    async def payload():
        yield b'{"foo": "bar"}'

    # full upload lifecycle: start, append one chunk, finish
    await fm.file_storage_manager.start(fm.dm)
    await fm.file_storage_manager.append(fm.dm, payload(), 0)
    await fm.file_storage_manager.finish(fm.dm)
    await fm.dm.finish()

    serialized = json_compatible(obj.file)
    assert serialized['filename'] == 'foobar.json'
    assert serialized['size'] == 14
    assert serialized['md5'] == 'foobar'
async def __call__(self):
    """Serialize the context through its registered JSON serializer."""
    to_json = get_multi_adapter(
        (self.context, self.request), IResourceSerializeToJson)
    return await to_json()
async def __call__(self):
    """Serialize the wrapped factory through its registered adapter."""
    factory_serializer = get_multi_adapter(
        (self.value, self.request), IFactorySerializeToJson)
    return await factory_serializer()
async def get(context, request):
    """Return the JSON serialization of ``context`` for this request."""
    to_json = get_multi_adapter((context, request), IResourceSerializeToJson)
    return await to_json()
def serialize(self):
    """Serialize this schema field into a JSON-compatible dict.

    Walks every attribute field declared by the field's interfaces and
    serializes each value that is set (or force-included), then
    post-processes 'object'-typed results for JSON/dict/object fields.
    """
    field = self.get_field()
    result = {'type': self.field_type}
    # caching the field_attributes here improves performance dramatically
    if field.__class__ in FIELDS_CACHE:
        field_attributes = FIELDS_CACHE[field.__class__].copy()
    else:
        field_attributes = {}
        for schema in implementedBy(field.__class__).flattened():
            field_attributes.update(get_fields(schema))
        FIELDS_CACHE[field.__class__] = field_attributes
    for attribute_name in sorted(field_attributes.keys()):
        attribute_field = field_attributes[attribute_name]
        if attribute_name in self.filtered_attributes:
            continue

        element_name = attribute_field.__name__
        attribute_field = attribute_field.bind(field)
        force = (element_name in self.forced_fields)

        value = attribute_field.get(field)

        # For 'default', 'missing_value' etc, we want to validate against
        # the imported field type itself, not the field type of the
        # attribute
        if element_name in self.field_type_attributes or \
                element_name in self.non_validated_field_type_attributes:
            attribute_field = field

        text = None
        if isinstance(value, bytes):
            text = value.decode('utf-8')
        elif isinstance(value, str):
            text = value
        elif IField.providedBy(value):
            # the attribute is itself a field (e.g. value_type):
            # serialize it recursively through its own adapter
            serializer = get_multi_adapter(
                (value, field, self.request), ISchemaFieldSerializeToJson)
            text = serializer.serialize()
            if 'properties' in text:
                text = text['properties']
        elif value is not None and (force or value != field.missing_value):
            text = json_compatible(value)
        # NOTE(review): falsy serialized values (empty string/dict) are
        # dropped here as well as None — presumably intentional; confirm
        if text:
            if attribute_name == 'value_type':
                # JSON-schema naming: a collection's item type is 'items'
                attribute_name = 'items'
            result[attribute_name] = text
    if result['type'] == 'object':
        if IJSONField.providedBy(field):
            # JSON fields carry their own schema fragment verbatim
            result.update(field.json_schema)
        if IDict.providedBy(field):
            if field.value_type:
                field_serializer = get_multi_adapter(
                    (field.value_type, self.schema, self.request),
                    ISchemaFieldSerializeToJson)
                result['additionalProperties'] = field_serializer.serialize()
            else:
                # untyped dict: accept any additional properties
                result['additionalProperties'] = True
        elif IObject.providedBy(field):
            schema_serializer = get_multi_adapter((field.schema, self.request),
                                                  ISchemaSerializeToJson)
            result['properties'] = schema_serializer.serialize()
    if field.extra_values is not None:
        result.update(field.extra_values)
    return result