async def test_write_large_blob_data(db, guillotina_main):
    """Write a blob big enough to span several chunks and verify it round-trips."""
    db = await get_database('db')
    login()
    payload = b'foobar' * 999999  # large enough to force 6 storage chunks

    async with transaction(db=db):
        container = await db.async_get('container')
        if container is None:
            container = await create_content_in_container(
                db, 'Container', 'container', title='Container')
        blob = Blob(container)
        container.blob = blob
        writer = blob.open('w')
        await writer.async_write(payload)

    async with transaction(db=db):
        container = await db.async_get('container')
        assert await container.blob.open().async_read() == payload
        assert container.blob.size == len(payload)
        assert container.blob.chunks == 6
        await db.async_del('container')
async def test_delete_by_value_field(dummy_request, mock_txn):
    """Patch-list 'remove' drops an existing value and errors on a missing one."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)
    errors = []

    # Seed the list with two values.
    await deserializer.set_schema(
        ITestSchema, content, {"patch_list_int": [1, 2]}, errors)
    assert errors == []
    assert getattr(content, "patch_list_int", []) == [1, 2]

    # Removing a present value succeeds.
    await deserializer.set_schema(
        ITestSchema, content,
        {"patch_list_int": {"op": "remove", "value": 2}}, errors)
    assert errors == []
    assert getattr(content, "patch_list_int", []) == [1]

    # Removing an absent value records a field error.
    await deserializer.set_schema(
        ITestSchema, content,
        {"patch_list_int": {"op": "remove", "value": 99}}, errors)
    assert len(errors) == 1
    assert errors[0]["field"] == "patch_list_int"
async def test_creator_used_from_content_creation(self, dummy_request):
    """Explicit creators/contributors are honored on both object and behavior."""
    self.request = dummy_request
    utils.login(self.request)

    container = await create_content(
        'Container', id='guillotina', title='Guillotina')
    container.__name__ = 'guillotina'
    utils._p_register(container)

    import guillotina.tests
    configure.register_configuration(
        Folder,
        dict(
            type_name="TestType2",
            behaviors=[],
            module=guillotina.tests  # for registration initialization
        ),
        'contenttype')

    root = get_utility(IApplication, name='root')
    configure.load_configuration(
        root.app.config, 'guillotina.tests', 'contenttype')
    root.app.config.execute_actions()
    load_cached_schema()

    obj = await create_content_in_container(
        container, 'TestType2', 'foobar',
        creators=('root',), contributors=('root',))
    assert obj.creators == ('root',)
    assert obj.contributors == ('root',)

    behavior = IDublinCore(obj)
    assert behavior.creators == ('root',)
    assert behavior.contributors == ('root',)
async def test_iterate_storage(own_dummy_request, mock_txn):
    """Upload 20 files and verify the bucket iterator yields all of them."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })

        for _ in range(20):
            # Reset the fake request stream for each upload.
            own_dummy_request._payload = FakeContentReader()
            own_dummy_request._cache_data = b""
            own_dummy_request._last_read_pos = 0
            ob = create_content()
            ob.file = None
            manager = FileManager(ob, own_dummy_request, IContent["file"].bind(ob))
            await manager.upload()

        util = get_utility(IS3BlobStore)
        items = [item async for item in util.iterate_bucket()]
        assert len(items) == 20
        await _cleanup()
async def test_store_file_in_cloud(own_dummy_request, mock_txn):
    """Upload a gif to S3, verify file metadata, then delete the stored object."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        own_dummy_request._payload = FakeContentReader()

        ob = create_content()
        ob.file = None
        manager = FileManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await manager.upload()

        # Upload is finalized: no in-progress id, metadata populated.
        assert ob.file._upload_file_id is None
        assert ob.file.uri is not None
        assert ob.file.content_type == "image/gif"
        assert ob.file.filename == "test.gif"
        assert ob.file._size == len(_test_gif)
        assert ob.file.md5 is not None
        assert ob.__uuid__ in ob.file.uri

        assert len(await get_all_objects()) == 1
        storage = S3FileStorageManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await storage.delete_upload(ob.file.uri)
        assert len(await get_all_objects()) == 0
async def test_fulltext_query_pg_catalog(container_requester):
    """Full-text title search via the PG catalog returns matching items."""
    from guillotina.contrib.catalog.pg import PGSearchUtility

    async with container_requester as requester:
        await requester(
            "POST", "/db/guillotina/",
            data=json.dumps({
                "@type": "Item",
                "id": "item1",
                "title": "Something interesting about foobar"
            }),
        )
        await requester(
            "POST", "/db/guillotina/",
            data=json.dumps({
                "@type": "Item",
                "title": "Something else",
                "id": "item2"
            }),
        )

        async with requester.db.get_transaction_manager() as tm, await tm.begin():
            test_utils.login()
            root = await tm.get_root()
            container = await root.async_get("guillotina")

            util = PGSearchUtility()
            await util.initialize()

            # Both titles contain "something"; only one contains "interesting".
            results = await util.search(container, {"title": "something"})
            assert len(results["items"]) == 2
            results = await util.search(container, {"title": "interesting"})
            assert len(results["items"]) == 1
async def test_write_large_blob_data(postgres, guillotina_main):
    """Write a multi-chunk blob under managed transactions and verify re-read."""
    root = get_utility(IApplication, name='root')
    db = root['db']
    request = get_mocked_request(db)
    login(request)
    payload = b'foobar' * 999999  # large enough to force 6 storage chunks

    async with managed_transaction(request=request):
        container = await create_content_in_container(
            db, 'Container', 'container', request=request, title='Container')
        blob = Blob(container)
        container.blob = blob
        writer = blob.open('w')
        await writer.async_write(payload)

    async with managed_transaction(request=request):
        container = await db.async_get('container')
        assert await container.blob.open().async_read() == payload
        assert container.blob.size == len(payload)
        assert container.blob.chunks == 6
        await db.async_del('container')
async def test_allowed_types(dummy_guillotina):
    """Constrained types allow listed children only and reject others."""
    utils.login()
    async with transaction(db=await get_database("db")):
        container = await create_content("Container", id="guillotina", title="Guillotina")
        container.__name__ = "guillotina"
        utils.register(container)

        import guillotina.tests
        configure.register_configuration(
            Folder,
            dict(
                type_name="TestType",
                allowed_types=["Item"],
                module=guillotina.tests,  # for registration initialization
            ),
            "contenttype",
        )
        root = get_utility(IApplication, name="root")
        configure.load_configuration(root.app.config, "guillotina.tests", "contenttype")
        root.app.config.execute_actions()
        load_cached_schema()

        obj = await create_content_in_container(container, "TestType", "foobar")
        constrains = IConstrainTypes(obj, None)
        assert constrains.get_allowed_types() == ["Item"]
        assert constrains.is_type_allowed("Item")

        # Only "Item" children are permitted inside a TestType.
        with pytest.raises(NotAllowedContentType):
            await create_content_in_container(obj, "TestType", "foobar")
        await create_content_in_container(obj, "Item", "foobar")
def __init__(self):
    """Bootstrap the command: parse args, load settings, set up logging, run.

    Loads configuration from the file named by ``--configuration`` (falling
    back to ``MISSING_SETTINGS`` with a warning), builds the app, configures
    stdout logging (DEBUG when ``--debug`` is set), then invokes ``self.run``
    — awaiting it on the event loop when it is a coroutine function.
    """
    self.request = get_mocked_request()
    login(self.request)

    parser = self.get_parser()
    arguments = parser.parse_args()

    if os.path.exists(arguments.configuration):
        with open(arguments.configuration, 'r') as config:
            settings = json.load(config)
    else:
        # logger.warn is deprecated since Python 3.3; use logger.warning
        logger.warning(
            'Could not find the configuration file {}. Using default settings.'
            .format(arguments.configuration))
        settings = MISSING_SETTINGS.copy()

    app = self.make_app(settings)

    logging.basicConfig(stream=sys.stdout)
    logger.setLevel(logging.INFO)
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.INFO)
    if arguments.debug:
        logger.setLevel(logging.DEBUG)
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
        ch.setLevel(logging.DEBUG)

    if asyncio.iscoroutinefunction(self.run):
        loop = asyncio.get_event_loop()
        # Blocking call which returns when finished
        loop.run_until_complete(self.run(arguments, settings, app))
        loop.close()
    else:
        self.run(arguments, settings, app)
async def test_create_annotation(db, guillotina_main):
    """Annotations can be set, read back, and deleted across transactions."""
    root = get_utility(IApplication, name='root')
    db = root['db']
    request = get_mocked_request(db)
    login(request)

    async with managed_transaction(request=request, write=True):
        container = await create_content_in_container(
            db, 'Container', 'container', request=request, title='Container')
        ob = await create_content_in_container(
            container, 'Item', 'foobar', request=request)
        annotations = IAnnotations(ob)
        data = AnnotationData()
        data['foo'] = 'bar'
        await annotations.async_set('foobar', data)

    async with managed_transaction(request=request, write=True):
        container = await db.async_get('container')
        ob = await container.async_get('foobar')
        annotations = IAnnotations(ob)
        assert 'foobar' in (await annotations.async_keys())
        await annotations.async_del('foobar')

    async with managed_transaction(request=request, write=True):
        container = await db.async_get('container')
        ob = await container.async_get('foobar')
        annotations = IAnnotations(ob)
        assert 'foobar' not in (await annotations.async_keys())
        await container.async_del('foobar')
        await db.async_del('container')
async def test_dates_bucket_list_field(dummy_request):
    """Bucket-list append/extend update the stored bucket lengths."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    content._p_jar = mocks.MockTransaction()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)

    await deserializer.set_schema(
        ITestSchema, content,
        {'datetime_bucket_list': {
            'op': 'append',
            'value': '2018-06-05T12:35:30.865745+00:00'
        }}, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 1

    await deserializer.set_schema(
        ITestSchema, content,
        {'datetime_bucket_list': {
            'op': 'extend',
            'value': [
                '2019-06-05T12:35:30.865745+00:00',
                '2020-06-05T12:35:30.865745+00:00'
            ]
        }}, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 3
async def test_raises_not_retryable(dummy_request, mock_txn):
    """A retried request whose body exceeds the cache limit is rejected."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with dummy_request:
        dummy_request._container_id = "test-container"
        await _cleanup()

        # Build a payload larger than the request cache so a retry cannot
        # replay the body from cache.
        file_data = b""
        while len(file_data) < MAX_REQUEST_CACHE_SIZE:
            file_data += _test_gif

        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(file_data).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(file_data),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader(file_data)

        ob = create_content()
        ob.file = None
        manager = FileManager(ob, dummy_request, IContent["file"].bind(ob))
        await manager.upload()

        dummy_request._retry_attempt = 1
        with pytest.raises(UnRetryableRequestError):
            await manager.upload()
async def test_download(dummy_request, mock_txn):
    """Downloading a multi-chunk upload reports the full content length."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with dummy_request:
        await _cleanup()

        # Build a payload spanning more than one chunk.
        file_data = b""
        while len(file_data) < CHUNK_SIZE:
            file_data += _test_gif

        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(file_data).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(file_data),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader(file_data)
        dummy_request.send = FakeContentSend()

        ob = create_content()
        ob.file = None
        manager = FileManager(ob, dummy_request, IContent["file"].bind(ob))
        await manager.upload()

        assert ob.file.upload_file_id is None
        assert ob.file.uri is not None

        resp = await manager.download()
        assert int(resp.content_length) == len(file_data)
async def test_upload_works_with_plus_id(dummy_request, mock_txn):
    """Uploads succeed for objects whose ids contain special characters."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()

        parent = create_content(id="foobar")
        ob = create_content(id="*****@*****.**", parent=parent)
        ob.file = None
        manager = FileManager(ob, dummy_request, IContent["file"].bind(ob))
        await manager.upload()

        assert getattr(ob.file, "upload_file_id", None) is None
        assert ob.file.uri is not None

        items = await get_all_objects()
        assert len(items) == 1
        assert items[0]["name"] == ob.file.uri
async def test_patch_int_field_normal_path(dummy_request, mock_txn):
    """A plain integer assignment to a patch-int field is stored verbatim."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)
    await deserializer.set_schema(ITestSchema, content, {"patch_int": 2}, [])
    assert content.patch_int == 2
async def test_dates_bucket_list_field(dummy_request, mock_txn):
    """Bucket-list append/extend update the stored bucket lengths."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)

    await deserializer.set_schema(
        ITestSchema, content,
        {"datetime_bucket_list": {
            "op": "append",
            "value": "2018-06-05T12:35:30.865745+00:00"
        }},
        [],
    )
    assert content.datetime_bucket_list.annotations_metadata[0]["len"] == 1

    await deserializer.set_schema(
        ITestSchema, content,
        {"datetime_bucket_list": {
            "op": "extend",
            "value": [
                "2019-06-05T12:35:30.865745+00:00",
                "2020-06-05T12:35:30.865745+00:00"
            ],
        }},
        [],
    )
    assert content.datetime_bucket_list.annotations_metadata[0]["len"] == 3
async def test_dates_bucket_list_field(dummy_request):
    """Bucket-list append/extend update the stored bucket lengths."""
    login()
    content = create_content()
    content.__txn__ = mocks.MockTransaction()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)

    await deserializer.set_schema(
        ITestSchema, content,
        {'datetime_bucket_list': {
            'op': 'append',
            'value': '2018-06-05T12:35:30.865745+00:00'
        }}, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 1

    await deserializer.set_schema(
        ITestSchema, content,
        {'datetime_bucket_list': {
            'op': 'extend',
            'value': [
                '2019-06-05T12:35:30.865745+00:00',
                '2020-06-05T12:35:30.865745+00:00'
            ]
        }}, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 3
async def run(self, arguments, settings, app):
    """Generate test data in every database registered on the root app."""
    login()
    self.arguments = arguments
    root = get_utility(IApplication, name="root")
    for _id, db in root:
        # Skip non-database children of the root application.
        if IDatabase.providedBy(db):
            await self.generate_test_data(db)
async def test_create_annotation(db, guillotina_main):
    """Annotations can be set, read back, and deleted across transactions."""
    db = await get_database("db")
    login()

    async with transaction(db=db):
        container = await create_content_in_container(
            db, "Container", "container", title="Container")
        ob = await create_content_in_container(container, "Item", "foobar")
        annotations = IAnnotations(ob)
        data = AnnotationData()
        data["foo"] = "bar"
        await annotations.async_set("foobar", data)

    async with transaction(db=db):
        container = await db.async_get("container")
        ob = await container.async_get("foobar")
        annotations = IAnnotations(ob)
        assert "foobar" in (await annotations.async_keys())
        await annotations.async_del("foobar")

    async with transaction(db=db):
        container = await db.async_get("container")
        ob = await container.async_get("foobar")
        annotations = IAnnotations(ob)
        assert "foobar" not in (await annotations.async_keys())
        await container.async_del("foobar")
        await db.async_del("container")
async def test_delete_by_value_field(dummy_request):
    """Patch-list 'remove' drops an existing value and errors on a missing one."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []

    # Seed the list with two values.
    await deserializer.set_schema(
        ITestSchema, content, {'patch_list_int': [1, 2]}, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1, 2]

    # Removing a present value succeeds.
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_list_int': {'op': 'remove', 'value': 2}}, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1]

    # Removing an absent value records a field error.
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_list_int': {'op': 'remove', 'value': 99}}, errors)
    assert len(errors) == 1
    assert errors[0]['field'] == 'patch_list_int'
async def test_creator_used_from_content_creation(dummy_guillotina):
    """Explicit creators/contributors are honored on both object and behavior."""
    utils.login()
    async with transaction(db=await get_database("db")):
        container = await create_content("Container", id="guillotina", title="Guillotina")
        container.__name__ = "guillotina"
        utils.register(container)

        import guillotina.tests
        configure.register_configuration(
            Folder,
            dict(
                type_name="TestType2",
                behaviors=[],
                module=guillotina.tests
            ),  # for registration initialization
            "contenttype",
        )
        root = get_utility(IApplication, name="root")
        configure.load_configuration(root.app.config, "guillotina.tests", "contenttype")
        root.app.config.execute_actions()
        load_cached_schema()

        obj = await create_content_in_container(
            container, "TestType2", "foobar",
            creators=("root",), contributors=("root",)
        )
        assert obj.creators == ("root",)
        assert obj.contributors == ("root",)

        behavior = IDublinCore(obj)
        assert behavior.creators == ("root",)
        assert behavior.contributors == ("root",)
async def test_write_large_blob_data(db, guillotina_main):
    """Write a multi-chunk blob under managed transactions and verify re-read."""
    root = get_utility(IApplication, name='root')
    db = root['db']
    request = get_mocked_request(db)
    login(request)
    payload = b'foobar' * 999999  # large enough to force 6 storage chunks

    async with managed_transaction(request=request):
        container = await db.async_get('container')
        if container is None:
            container = await create_content_in_container(
                db, 'Container', 'container', request=request, title='Container')
        blob = Blob(container)
        container.blob = blob
        writer = blob.open('w')
        await writer.async_write(payload)

    async with managed_transaction(request=request):
        container = await db.async_get('container')
        assert await container.blob.open().async_read() == payload
        assert container.blob.size == len(payload)
        assert container.blob.chunks == 6
        await db.async_del('container')
async def test_fulltext_query_pg_catalog(container_requester):
    """Full-text title query via the PG catalog returns matching members."""
    from guillotina.contrib.catalog.pg import PGSearchUtility

    async with container_requester as requester:
        await requester(
            'POST', '/db/guillotina/',
            data=json.dumps({
                "@type": "Item",
                "id": "item1",
                "title": "Something interesting about foobar"
            }))
        await requester(
            'POST', '/db/guillotina/',
            data=json.dumps({
                "@type": "Item",
                "title": "Something else",
                "id": "item2",
            }))

        async with requester.db.get_transaction_manager() as tm, await tm.begin():
            test_utils.login()
            root = await tm.get_root()
            container = await root.async_get('guillotina')

            util = PGSearchUtility()
            await util.initialize()

            # Both titles contain "something"; only one contains "interesting".
            results = await util.query(container, {'title': 'something'})
            assert len(results['member']) == 2
            results = await util.query(container, {'title': 'interesting'})
            assert len(results['member']) == 1
async def test_add_behavior(dummy_guillotina):
    """add_behavior validates input, deduplicates, and registers new behaviors."""
    utils.login()
    async with transaction(db=await get_database("db")):
        container = await create_content("Container", id="guillotina", title="Guillotina")
        container.__name__ = "guillotina"
        utils.register(container)

        item = await create_content_in_container(container, "Item", id_="foobar")

        # Invalid arguments are rejected.
        with pytest.raises(AttributeError):
            item.add_behavior(123)
        with pytest.raises(ComponentLookupError):
            item.add_behavior("foo")

        all_behaviors = await get_all_behaviors(item)
        assert len(all_behaviors) == 1
        assert all_behaviors[0][0] == IDublinCore

        # IDublinCore already exists and check it is not added
        item.add_behavior(IDublinCore.__identifier__)
        assert len(item.__behaviors__) == 0
        assert len(await get_all_behaviors(item)) == 1

        # Manually add IDublinCore and check it is not returned twice
        item.__behaviors__ |= {IDublinCore.__identifier__}
        assert len(await get_all_behaviors(item)) == 1

        item.add_behavior(IAttachment)
        assert len(await get_all_behaviors(item)) == 2
def run(self, arguments, settings, app):
    """Run a full reindex with a mocked request and no transaction strategy."""
    request = get_mocked_request()
    login()
    task_vars.request.set(request)
    # Disable transaction conflict handling for the bulk reindex.
    change_transaction_strategy("none")
    loop = self.get_loop()
    loop.run_until_complete(self.reindex_all(arguments))
def transaction(self, request=None):
    """Return a request-wrapped managed write transaction.

    When no request is given, a mocked one bound to this helper's db is
    created and logged in.
    """
    if request is None:
        request = get_mocked_request(self.db)
    login(request)
    managed = managed_transaction(
        request=request, write=True, adopt_parent_txn=True)
    return wrap_request(request, managed)
async def test_inherit(container_requester):
    """A Deny on permission inheritance blocks access until re-granted locally."""
    async with container_requester as requester:
        response, status = await requester(
            "POST", "/db/guillotina/",
            data=json.dumps({"@type": "Item", "id": "testing"})
        )
        assert status == 201

        # Grant user1 Reader on the container.
        response, status = await requester(
            "POST", "/db/guillotina/@sharing",
            data=json.dumps({
                "prinrole": [
                    {"principal": "user1", "role": "guillotina.Reader", "setting": "Allow"}
                ]
            }),
        )
        assert status == 200

        # Deny inheritance of ViewContent on the child.
        response, status = await requester(
            "POST", "/db/guillotina/testing/@sharing",
            data=json.dumps({
                "perminhe": [
                    {"permission": "guillotina.ViewContent", "setting": "Deny"}
                ]
            }),
        )
        assert status == 200

        response, status = await requester("GET", "/db/guillotina/testing/@all_permissions")
        assert status == 200

        container = await utils.get_container(requester=requester)
        content = await container.async_get("testing")
        user = GuillotinaUser("user1")
        utils.login(user=user)
        policy = get_security_policy()

        # user1 can view the container but not the child.
        assert policy.check_permission("guillotina.ViewContent", container)
        assert not policy.check_permission("guillotina.ViewContent", content)

        response, status = await requester("GET", "/db/guillotina/testing")
        assert status == 401

        # Re-grant ViewContent locally to managers; access is restored.
        response, status = await requester(
            "POST", "/db/guillotina/testing/@sharing",
            data=json.dumps({
                "roleperm": [
                    {
                        "permission": "guillotina.ViewContent",
                        "role": "guillotina.Manager",
                        "setting": "Allow",
                    }
                ]
            }),
        )
        assert status == 200

        response, status = await requester("GET", "/db/guillotina/testing")
        assert status == 200
async def test_read_range(own_dummy_request, mock_txn):
    """Byte-range reads from S3 storage return the expected slices."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)

    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        own_dummy_request._payload = FakeContentReader()

        ob = create_content()
        ob.file = None
        manager = FileManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await manager.upload()

        s3mng = S3FileStorageManager(ob, own_dummy_request, IContent["file"].bind(ob))
        async for chunk in s3mng.read_range(0, 100):
            assert len(chunk) == 100
            assert chunk == _test_gif[:100]
        async for chunk in s3mng.read_range(100, 200):
            assert len(chunk) == 100
            assert chunk == _test_gif[100:200]
def run(self, arguments, settings, app):
    """Launch an interactive IPython shell with guillotina helpers in scope."""
    app_settings["root_user"]["password"] = TESTING_SETTINGS["root_user"][
        "password"]
    root = get_utility(IApplication, name="root")
    request = get_mocked_request()
    login()
    helpers = ShellHelpers(app, root, request)
    task_vars.request.set(request)

    # Expose helper callables as local names for the shell user.
    use_db = helpers.use_db  # noqa
    use_container = helpers.use_container  # noqa
    commit = helpers.commit  # noqa
    abort = helpers.abort  # noqa
    setup = helpers.setup_context  # noqa

    try:
        from IPython.terminal.embed import InteractiveShellEmbed  # type: ignore
        from traitlets.config.loader import Config  # type: ignore
    except ImportError:
        sys.stderr.write("You must install ipython for the "
                         "shell command to work.\n"
                         "Use `pip install ipython` to install ipython.\n")
        return 1

    cfg = Config()
    loop = self.get_loop()
    banner = loop.run_until_complete(self.get_banner())
    ipshell = InteractiveShellEmbed(config=cfg, banner1=banner)
    ipshell()
async def test_delete_by_value_field(dummy_request):
    """Patch-list 'remove' drops an existing value and errors on a missing one."""
    login()
    content = create_content()
    deserializer = get_multi_adapter(
        (content, dummy_request), IResourceDeserializeFromJson)
    errors = []

    # Seed the list with two values.
    await deserializer.set_schema(
        ITestSchema, content, {'patch_list_int': [1, 2]}, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1, 2]

    # Removing a present value succeeds.
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_list_int': {'op': 'remove', 'value': 2}}, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1]

    # Removing an absent value records a field error.
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_list_int': {'op': 'remove', 'value': 99}}, errors)
    assert len(errors) == 1
    assert errors[0]['field'] == 'patch_list_int'
async def test_create_annotation(db, guillotina_main):
    """Annotations can be set, read back, and deleted across transactions."""
    db = await get_database('db')
    login()

    async with transaction(db=db):
        container = await create_content_in_container(
            db, 'Container', 'container', title='Container')
        ob = await create_content_in_container(container, 'Item', 'foobar')
        annotations = IAnnotations(ob)
        data = AnnotationData()
        data['foo'] = 'bar'
        await annotations.async_set('foobar', data)

    async with transaction(db=db):
        container = await db.async_get('container')
        ob = await container.async_get('foobar')
        annotations = IAnnotations(ob)
        assert 'foobar' in (await annotations.async_keys())
        await annotations.async_del('foobar')

    async with transaction(db=db):
        container = await db.async_get('container')
        ob = await container.async_get('foobar')
        annotations = IAnnotations(ob)
        assert 'foobar' not in (await annotations.async_keys())
        await container.async_del('foobar')
        await db.async_del('container')
async def test_allowed_types(self, dummy_request):
    """Constrained types allow listed children only and reject others."""
    self.request = dummy_request
    utils.login(self.request)

    container = await create_content(
        'Container', id='guillotina', title='Guillotina')
    container.__name__ = 'guillotina'
    utils._p_register(container)

    import guillotina.tests
    configure.register_configuration(
        Folder,
        dict(
            type_name="TestType",
            allowed_types=['Item'],
            module=guillotina.tests  # for registration initialization
        ),
        'contenttype')

    root = get_utility(IApplication, name='root')
    configure.load_configuration(
        root.app.config, 'guillotina.tests', 'contenttype')
    root.app.config.execute_actions()
    load_cached_schema()

    obj = await create_content_in_container(container, 'TestType', 'foobar')
    constrains = IConstrainTypes(obj, None)
    assert constrains.get_allowed_types() == ['Item']
    assert constrains.is_type_allowed('Item')

    # Only "Item" children are permitted inside a TestType.
    with pytest.raises(NotAllowedContentType):
        await create_content_in_container(obj, 'TestType', 'foobar')
    await create_content_in_container(obj, 'Item', 'foobar')
def run(self, arguments, settings, app):
    """Launch an interactive IPython shell with guillotina helpers in scope."""
    app_settings['root_user']['password'] = TESTING_SETTINGS['root_user'][
        'password']
    root = get_utility(IApplication, name='root')
    request = get_mocked_request()
    login()
    helpers = ShellHelpers(app, root, request)
    task_vars.request.set(request)

    # Expose helper callables as local names for the shell user.
    use_db = helpers.use_db  # noqa
    use_container = helpers.use_container  # noqa
    commit = helpers.commit  # noqa
    abort = helpers.abort  # noqa

    try:
        from IPython.terminal.embed import InteractiveShellEmbed
        from traitlets.config.loader import Config
    except ImportError:
        sys.stderr.write('You must install ipython for the '
                         'shell command to work.\n'
                         'Use `pip install ipython` to install ipython.\n')
        return 1

    cfg = Config()
    ipshell = InteractiveShellEmbed(config=cfg, banner1=self.banner)
    ipshell()
async def test_check_permission_deserialize_content(dummy_request):
    """check_permission grants ViewContent and its result is cached."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    assert deserializer.check_permission('guillotina.ViewContent')
    assert deserializer.check_permission('guillotina.ViewContent')  # with cache
async def test_patchfield_notdefined_field(dummy_request):
    """Keys absent from the nested schema are dropped, not stored or errored."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []

    nested_value = {
        "foo": "bar",
        "bar": 1,
        # Value not found in schema
        "not_defined_field": "arbitrary-value"
    }
    await deserializer.set_schema(
        ITestSchema, content,
        {
            "dict_of_obj": {"key1": dict(nested_value)},
            "patch_dict_of_obj": {"key1": dict(nested_value)}
        }, errors)
    assert len(errors) == 0
    # 'not_defined_field' is not part of INestFieldSchema so should not serialized and stored
    assert 'not_defined_field' not in content.dict_of_obj['key1']
    assert 'not_defined_field' not in content.patch_dict_of_obj['key1']

    await deserializer.set_schema(
        ITestSchema, content,
        {
            "patch_dict_of_obj": {
                "op": "assign",
                "value": {
                    "key": "key1",
                    "value": {
                        "op": "append",
                        "value": dict(nested_value)
                    }
                }
            }
        }, errors)
    assert len(errors) == 0
    assert 'not_defined_field' not in content.dict_of_obj['key1']
    assert 'not_defined_field' not in content.patch_dict_of_obj['key1']
async def test_patch_int_field_normal_path(dummy_request):
    """A plain integer assignment to a patch-int field is stored verbatim."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    await deserializer.set_schema(
        ITestSchema, content, {'patch_int': 2}, [])
    assert content.patch_int == 2
async def test_patch_list_field_invalid_type(dummy_request):
    """Appending a value of the wrong type records a deserialization error."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_list': {'op': 'append', 'value': 1}}, errors)
    # Nothing stored; one typed error reported.
    assert len(getattr(content, 'patch_list', [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], ValueDeserializationError)
async def test_patch_int_field_invalid_type(dummy_request):
    """Each patch-int op rejects a non-integer value with WrongType."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    for op in ('inc', 'dec', 'reset'):
        errors = []
        await deserializer.set_schema(
            ITestSchema, content,
            {'patch_int': {'op': op, 'value': 3.3}}, errors)
        # Value unchanged; one WrongType error per op.
        assert getattr(content, 'patch_int', 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]['error'], WrongType)
async def test_patch_dict_field(dummy_request):
    """Patch-dict assign adds/overwrites keys and del removes them."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)

    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_dict': {
            'op': 'assign',
            'value': {'key': 'foo', 'value': 'bar'}
        }}, [])
    assert len(content.patch_dict) == 1
    assert content.patch_dict['foo'] == 'bar'

    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_dict': {
            'op': 'assign',
            'value': {'key': 'foo2', 'value': 'bar2'}
        }}, [])
    assert len(content.patch_dict) == 2
    assert content.patch_dict['foo2'] == 'bar2'

    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_dict': {'op': 'del', 'value': 'foo2'}}, [])
    assert len(content.patch_dict) == 1
    assert 'foo2' not in content.patch_dict
async def test_create_blob(db, guillotina_main):
    """A blob attached to a container persists with matching ids."""
    root = get_utility(IApplication, name='root')
    db = root['db']
    request = get_mocked_request(db)
    login(request)

    async with managed_transaction(request=request):
        container = await create_content_in_container(
            db, 'Container', 'container', request=request, title='Container')
        blob = Blob(container)
        container.blob = blob

    async with managed_transaction(request=request):
        container = await db.async_get('container')
        assert blob.bid == container.blob.bid
        assert blob.resource_zoid == container._p_oid
        await db.async_del('container')
async def test_patch_dict_field_invalid_type(dummy_request):
    """Assigning a non-string key to a patch-dict records a WrongType error."""
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content,
        {'patch_dict': {
            'op': 'assign',
            'value': {'key': 1, 'value': 'bar2'}
        }}, errors)
    # Nothing stored; one WrongType error reported.
    assert len(getattr(content, 'patch_dict', {})) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], WrongType)
async def test_patch_int_field(dummy_request):
    """Exercise inc/dec/reset semantics of the patch-int field type."""
    req = dummy_request  # noqa
    login(req)
    obj = create_content()
    deserializer = get_multi_adapter(
        (obj, req), IResourceDeserializeFromJson)

    async def patch(field, op, value=None):
        # small helper: apply one patch operation; 'value' omitted when None
        body = {'op': op}
        if value is not None:
            body['value'] = value
        await deserializer.set_schema(ITestSchema, obj, {field: body}, [])

    # increments add to the schema default (22)
    await patch('patch_int', 'inc', 3)
    assert obj.patch_int == 25
    # 'inc' with no value increments by 1
    await patch('patch_int', 'inc')
    assert obj.patch_int == 26
    # 'dec' with no value decrements by 1
    await patch('patch_int', 'dec')
    assert obj.patch_int == 25
    # explicit decrement
    await patch('patch_int', 'dec', 5)
    assert obj.patch_int == 20
    # values may go negative
    await patch('patch_int', 'dec', 25)
    assert obj.patch_int == -5
    # 'reset' with no value restores the schema default
    await patch('patch_int', 'reset')
    assert obj.patch_int == 22
    # 'reset' with a value assigns it directly
    await patch('patch_int', 'reset', 400)
    assert obj.patch_int == 400

    # with no default and no current value, operations start from 0
    assert getattr(obj, 'patch_int_no_default', None) is None
    await patch('patch_int_no_default', 'inc')
    assert obj.patch_int_no_default == 1
    obj.patch_int_no_default = None
    await patch('patch_int_no_default', 'dec')
    assert obj.patch_int_no_default == -1
    obj.patch_int_no_default = None
    await patch('patch_int_no_default', 'reset')
    assert obj.patch_int_no_default == 0
async def test_bucket_list_field(dummy_request):
    """Bucket-list fields shard appended items into annotation buckets of 10."""
    req = dummy_request  # noqa
    login(req)
    obj = create_content()
    obj._p_jar = mocks.MockTransaction()
    deserializer = get_multi_adapter(
        (obj, req), IResourceDeserializeFromJson)

    async def append_one():
        # append a fresh {'key': 'foo', 'value': 'bar'} item via the deserializer
        await deserializer.set_schema(
            ITestSchema, obj, {
                'bucket_list': {
                    'op': 'append',
                    'value': {
                        'key': 'foo',
                        'value': 'bar'
                    }
                }
            }, [])

    await append_one()
    assert obj.bucket_list.annotations_metadata[0]['len'] == 1
    assert await obj.bucket_list.get(obj, 0, 0) == {
        'key': 'foo',
        'value': 'bar'
    }
    # out-of-range bucket/index lookups return None
    assert await obj.bucket_list.get(obj, 0, 1) is None
    assert await obj.bucket_list.get(obj, 1, 0) is None

    # 101 items total -> 10 full buckets of 10 plus one bucket holding 1
    for _ in range(100):
        await append_one()
    assert len(obj.bucket_list.annotations_metadata) == 11
    assert obj.bucket_list.annotations_metadata[0]['len'] == 10
    assert obj.bucket_list.annotations_metadata[5]['len'] == 10
    assert obj.bucket_list.annotations_metadata[10]['len'] == 1

    # removals shrink the containing bucket only
    await obj.bucket_list.remove(obj, 10, 0)
    assert obj.bucket_list.annotations_metadata[10]['len'] == 0
    await obj.bucket_list.remove(obj, 9, 0)
    assert obj.bucket_list.annotations_metadata[9]['len'] == 9
    assert len(obj.bucket_list) == 99

    # extend appends multiple items at once
    await deserializer.set_schema(
        ITestSchema, obj, {
            'bucket_list': {
                'op': 'extend',
                'value': [{
                    'key': 'foo',
                    'value': 'bar'
                }, {
                    'key': 'foo',
                    'value': 'bar'
                }]
            }
        }, [])
    assert len(obj.bucket_list) == 101
    assert json_compatible(obj.bucket_list) == {
        'len': 101,
        'buckets': 11
    }
    buckets = [b async for b in obj.bucket_list.iter_buckets(obj)]
    assert len(buckets) == 11
    items = [i async for i in obj.bucket_list.iter_items(obj)]
    assert len(items) == 101
    assert 'bucketlist-bucket_list0' in obj.__gannotations__
async def test_nested_patch_deserialize(dummy_request): request = dummy_request # noqa login(request) content = create_content() deserializer = get_multi_adapter( (content, request), IResourceDeserializeFromJson) errors = [] await deserializer.set_schema( ITestSchema, content, { "nested_patch": { "op": "assign", "value": { "key": "foobar", "value": { "op": "append", "value": { "foo": "bar", "bar": 1, "foobar_list": None, "nested_int": { "op": "reset", "value": 5, } } } } } }, errors) assert len(errors) == 0 assert len(content.nested_patch) == 1 assert content.nested_patch['foobar'][0]['foo'] == 'bar' assert content.nested_patch['foobar'][0]['bar'] == 1 assert content.nested_patch['foobar'][0]['nested_int'] == 5 await deserializer.set_schema( ITestSchema, content, { "nested_patch": { "op": "assign", "value": { "key": "foobar", "value": { "op": "append", "value": { "foo": "bar2", "bar": 2 } } } } }, errors) assert len(errors) == 0 assert content.nested_patch['foobar'][1]['foo'] == 'bar2' assert content.nested_patch['foobar'][1]['bar'] == 2 await deserializer.set_schema( ITestSchema, content, { "nested_patch": { "op": "assign", "value": { "key": "foobar", "value": { "op": "update", "value": { "index": 1, "value": { "foo": "bar3", "bar": 3, "nested_int": { "op": "inc", } } } } } } }, errors) assert len(errors) == 0 assert content.nested_patch['foobar'][1]['foo'] == 'bar3' assert content.nested_patch['foobar'][1]['bar'] == 3 assert content.nested_patch['foobar'][1]['nested_int'] == 1
def test_get_authenticated_user_without_request(dummy_guillotina):
    """After login, the authenticated user is resolvable with no explicit request."""
    database = get_db(dummy_guillotina, 'db')
    mocked = get_mocked_request(database)
    login(mocked)
    user = utils.get_authenticated_user()
    assert user is not None
async def test_patch_list_field(dummy_request):
    """Exercise append/extend/update/del operations on a patch-list field."""
    req = dummy_request  # noqa
    login(req)
    obj = create_content()
    deserializer = get_multi_adapter(
        (obj, req), IResourceDeserializeFromJson)

    async def apply(op, value):
        # helper: run a single patch_list operation through the deserializer
        await deserializer.set_schema(
            ITestSchema, obj, {
                'patch_list': {
                    'op': op,
                    'value': value
                }
            }, [])

    await apply('append', {'foo': 'bar'})
    assert len(obj.patch_list) == 1
    assert obj.patch_list[0] == {'foo': 'bar'}

    await apply('append', {'foo2': 'bar2'})
    assert len(obj.patch_list) == 2
    assert obj.patch_list[1] == {'foo2': 'bar2'}

    await apply('extend', [{'foo3': 'bar3'}, {'foo4': 'bar4'}])
    assert len(obj.patch_list) == 4
    assert obj.patch_list[-1] == {'foo4': 'bar4'}

    # update replaces the item at the given index in place
    await apply('update', {
        'index': 3,
        'value': {'fooupdated': 'barupdated'}
    })
    assert len(obj.patch_list) == 4
    assert obj.patch_list[-1] == {'fooupdated': 'barupdated'}

    # del removes by index
    await apply('del', 3)
    assert len(obj.patch_list) == 3
def setup_fake_request(self):
    """Attach a fresh mocked request to the test case and authenticate with it."""
    self.request = get_mocked_request()
    # log in against the mocked request so subsequent calls see a user
    login(self.request)