async def test_should_not_resolve_conflict_error_with_simple_strat(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db, 'simple')
    tm = TransactionManager(aps)

    # create object first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    ob2 = create_content()
    txn.register(ob1)
    txn.register(ob2)
    await tm.commit(txn=txn)

    # 1 started before 2
    txn1 = await tm.begin()
    txn2 = await tm.begin()
    ob1 = await txn1.get(ob1._p_oid)
    ob2 = await txn2.get(ob2._p_oid)
    txn1.register(ob1)
    txn2.register(ob2)

    # commit 2 before 1
    await tm.commit(txn=txn2)
    with pytest.raises(ConflictError):
        await tm.commit(txn=txn1)

    await aps.remove()
    await cleanup(aps)

def test_generate_oid_with_parent():
    ob = utils.create_content()
    parent = ob.__parent__ = utils.create_content()
    parent.__parent__ = utils.create_content()
    zoid = oid.generate_oid(ob)
    assert len(zoid) == (oid.UUID_LENGTH + len(oid.OID_DELIMITER) + oid.OID_SPLIT_LENGTH)
    assert zoid.startswith(parent._p_oid[:oid.OID_SPLIT_LENGTH] + oid.OID_DELIMITER)

async def test_iterate_keys(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    # base aps uses 1 connection from the pool for starting transactions
    aps = await get_aps(db)
    tm = TransactionManager(aps)
    txn = await tm.begin()

    parent = create_content()
    txn.register(parent)
    original_keys = []
    for _ in range(50):
        item = create_content()
        original_keys.append(item.id)
        item.__parent__ = parent
        txn.register(item)
    await tm.commit(txn=txn)

    txn = await tm.begin()
    keys = []
    async for key in txn.iterate_keys(parent._p_oid, 2):
        keys.append(key)
    assert len(keys) == 50
    assert len(set(keys) - set(original_keys)) == 0
    await tm.abort(txn=txn)

async def test_create_blob(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)
    txn = await tm.begin()

    ob = create_content()
    txn.register(ob)
    await txn.write_blob_chunk('X' * 32, ob._p_oid, 0, b'foobar')
    await tm.commit(txn=txn)

    txn = await tm.begin()
    blob_record = await txn.read_blob_chunk('X' * 32, 0)
    assert blob_record['data'] == b'foobar'

    # also get data from ob that started as a stub...
    ob2 = await txn.get(ob._p_oid)
    assert ob2.type_name == 'Item'
    assert 'foobar' in ob2.id
    await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_delete_resource_deletes_blob(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)
    txn = await tm.begin()

    ob = create_content()
    txn.register(ob)
    await txn.write_blob_chunk('X' * 32, ob._p_oid, 0, b'foobar')
    await tm.commit(txn=txn)

    txn = await tm.begin()
    ob = await txn.get(ob._p_oid)
    txn.delete(ob)
    await tm.commit(txn=txn)
    await asyncio.sleep(0.1)  # make sure cleanup runs

    txn = await tm.begin()
    assert await txn.read_blob_chunk('X' * 32, 0) is None
    with pytest.raises(KeyError):
        await txn.get(ob._p_oid)
    await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_serialize_resource_omit_field(dummy_request):
    content = create_content()
    serializer = get_multi_adapter(
        (content, dummy_request),
        IResourceSerializeToJson)
    result = await serializer(omit=['guillotina.behaviors.dublincore.IDublinCore.creators'])
    assert 'creators' not in result['guillotina.behaviors.dublincore.IDublinCore']

async def test_none_strat_allows_trans_commits(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db, 'none')
    tm = TransactionManager(aps)

    # create object first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    txn.register(ob1)
    await tm.commit(txn=txn)

    txn1 = await tm.begin()
    txn2 = await tm.begin()
    ob1 = await txn1.get(ob1._p_oid)
    ob2 = await txn2.get(ob1._p_oid)
    ob1.title = 'foobar1'
    ob2.title = 'foobar2'
    txn1.register(ob1)
    txn2.register(ob2)
    await tm.commit(txn=txn2)
    await tm.commit(txn=txn1)

    txn = await tm.begin()
    ob1 = await txn.get(ob1._p_oid)
    assert ob1.title == 'foobar1'
    await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_serialize_resource(dummy_request):
    content = create_content()
    serializer = get_multi_adapter(
        (content, dummy_request),
        IResourceSerializeToJson)
    result = await serializer()
    assert 'guillotina.behaviors.dublincore.IDublinCore' in result

async def test_get_resources_of_type(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)

    # create object first, commit it...
    txn = await tm.begin()
    ob = create_content()
    txn.register(ob)
    await tm.commit(txn=txn)

    txn = await tm.begin()
    count = 0
    async for item in txn._get_resources_of_type('Item'):
        assert item['type'] == 'Item'
        count += 1
    assert count == 1
    await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_dates_bucket_list_field(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    content._p_jar = mocks.MockTransaction()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    await deserializer.set_schema(
        ITestSchema, content, {
            'datetime_bucket_list': {
                'op': 'append',
                'value': '2018-06-05T12:35:30.865745+00:00'
            }
        }, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 1
    await deserializer.set_schema(
        ITestSchema, content, {
            'datetime_bucket_list': {
                'op': 'extend',
                'value': [
                    '2019-06-05T12:35:30.865745+00:00',
                    '2020-06-05T12:35:30.865745+00:00'
                ]
            }
        }, [])
    assert content.datetime_bucket_list.annotations_metadata[0]['len'] == 3

async def test_handles_asyncpg_trying_txn_with_manual_txn(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)

    # simulate transaction already started (should not happen)
    for conn in tm._storage.pool._queue._queue:
        if conn._con is None:
            await conn.connect()
        await conn._con.execute('BEGIN;')

    txn = await tm.begin()

    # then, try doing stuff...
    ob = create_content()
    txn.register(ob)
    assert len(txn.modified) == 1
    await tm.commit(txn=txn)

    txn = await tm.begin()
    ob2 = await txn.get(ob._p_oid)
    assert ob2._p_oid == ob._p_oid
    await tm.commit(txn=txn)

    await aps.remove()
    await cleanup(aps)

def test_get_owners(dummy_guillotina):
    content = create_content()
    roleperm = IPrincipalRoleManager(content)
    roleperm.assign_role_to_principal('guillotina.Owner', 'foobar')
    assert utils.get_owners(content) == ['foobar']
    roleperm.assign_role_to_principal('guillotina.Owner', 'foobar2')
    assert utils.get_owners(content) == ['foobar', 'foobar2']

async def test_using_gather_with_queries_after_prepare(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)

    # create object first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    txn.register(ob1)
    await tm.commit(txn=txn)

    txn = await tm.begin()

    async def get_ob():
        await txn.get(ob1._p_oid)

    # one initial call should load prepared statement
    await txn.get(ob1._p_oid)

    # before we introduced locking on the connection, this would error
    await asyncio.gather(get_ob(), get_ob(), get_ob(), get_ob(), get_ob())

    await tm.abort(txn=txn)
    await aps.remove()
    await cleanup(aps)

async def test_get_security_data(dummy_request):
    request = dummy_request  # noqa
    ob = test_utils.create_content()
    adapter = get_adapter(ob, ISecurityInfo)
    data = adapter()
    assert 'access_users' in data
    assert 'access_roles' in data

async def test_restart_connection(db, dummy_request):
    """Low level test checks that root is not there"""
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)
    txn = await tm.begin()

    ob = create_content()
    txn.register(ob)
    assert len(txn.modified) == 1
    await tm.commit(txn=txn)

    with pytest.raises(ConflictError):
        await aps.restart_connection()

    txn = await tm.begin()
    ob2 = await txn.get(ob._p_oid)
    assert ob2._p_oid == ob._p_oid
    await tm.commit(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_delete_by_value_field(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_list_int': [1, 2]
        }, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1, 2]

    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_list_int': {
                'op': 'remove',
                'value': 2
            }
        }, errors)
    assert errors == []
    assert getattr(content, 'patch_list_int', []) == [1]

    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_list_int': {
                'op': 'remove',
                'value': 99
            }
        }, errors)
    assert len(errors) == 1
    assert errors[0]['field'] == 'patch_list_int'

async def test_check_permission_deserialize_content(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    assert deserializer.check_permission('guillotina.ViewContent')
    assert deserializer.check_permission('guillotina.ViewContent')  # with cache

async def run1():
    ob = test_utils.create_content()
    print('Test single adapter lookup')
    start = time.time()
    for _ in range(ITERATIONS):
        getAdapter(ob, ISecurityInfo)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')

async def test_patchfield_notdefined_field(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {
            "dict_of_obj": {
                "key1": {
                    "foo": "bar",
                    "bar": 1,
                    # Value not found in schema
                    "not_defined_field": "arbitrary-value"
                }
            },
            "patch_dict_of_obj": {
                "key1": {
                    "foo": "bar",
                    "bar": 1,
                    # Value not found in schema
                    "not_defined_field": "arbitrary-value"
                }
            }
        }, errors)
    assert len(errors) == 0

    # 'not_defined_field' is not part of INestFieldSchema so should not be serialized and stored
    assert 'not_defined_field' not in content.dict_of_obj['key1']
    assert 'not_defined_field' not in content.patch_dict_of_obj['key1']

    await deserializer.set_schema(
        ITestSchema, content, {
            "patch_dict_of_obj": {
                "op": "assign",
                "value": {
                    "key": "key1",
                    "value": {
                        "op": "append",
                        "value": {
                            "foo": "bar",
                            "bar": 1,
                            # Value not found in schema
                            "not_defined_field": "arbitrary-value"
                        }
                    }
                }
            }
        }, errors)
    assert len(errors) == 0
    assert 'not_defined_field' not in content.dict_of_obj['key1']
    assert 'not_defined_field' not in content.patch_dict_of_obj['key1']

async def run2():
    ob = test_utils.create_content()
    req = test_utils.get_mocked_request()
    start = time.time()
    print('Test multi adapter')
    for _ in range(ITERATIONS):
        get_multi_adapter((ob, req), IResourceDeserializeFromJson)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')

async def test_deleting_parent_deletes_children(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db)
    tm = TransactionManager(aps)
    txn = await tm.begin()

    folder = create_content(Folder, 'Folder')
    txn.register(folder)
    ob = create_content()
    await folder.async_set('foobar', ob)
    assert len(txn.modified) == 2
    await tm.commit(txn=txn)

    txn = await tm.begin()
    ob2 = await txn.get(ob._p_oid)
    folder2 = await txn.get(folder._p_oid)
    assert ob2._p_oid == ob._p_oid
    assert folder2._p_oid == folder._p_oid

    # delete parent, children should be gone...
    txn.delete(folder2)
    assert len(txn.deleted) == 1
    await tm.commit(txn=txn)

    # give delete task a chance to execute
    await asyncio.sleep(0.1)

    txn = await tm.begin()
    with pytest.raises(KeyError):
        await txn.get(ob._p_oid)
    with pytest.raises(KeyError):
        await txn.get(folder._p_oid)
    await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_serialize_resource_include_field(dummy_request):
    from guillotina.test_package import FileContent
    obj = create_content(FileContent, type_name='File')
    obj.file = DBFile(filename='foobar.json', size=25, md5='foobar')
    serializer = get_multi_adapter(
        (obj, dummy_request),
        IResourceSerializeToJson)
    result = await serializer(include=['guillotina.behaviors.dublincore.IDublinCore.creators'])
    assert 'creators' in result['guillotina.behaviors.dublincore.IDublinCore']
    assert len(result['guillotina.behaviors.dublincore.IDublinCore']) == 1
    assert 'file' not in result

async def test_patch_int_field_normal_path(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_int': 2
        }, [])
    assert content.patch_int == 2

async def test_serialize_omit_main_interface_field(dummy_request):
    from guillotina.test_package import FileContent
    obj = create_content(FileContent, type_name='File')
    obj.file = DBFile(filename='foobar.json', size=25, md5='foobar')
    serializer = get_multi_adapter(
        (obj, dummy_request),
        IResourceSerializeToJson)
    result = await serializer(omit=['file'])
    assert 'file' not in result
    result = await serializer()
    assert 'file' in result

async def test_vacuum_cleans_orphaned_content(cockroach_storage, dummy_request):
    request = dummy_request  # noqa
    async with cockroach_storage as storage:
        tm = TransactionManager(storage)
        txn = await tm.begin()
        folder1 = create_content()
        txn.register(folder1)
        folder2 = create_content()
        folder2.__parent__ = folder1
        txn.register(folder2)
        item = create_content()
        item.__parent__ = folder2
        txn.register(item)
        await tm.commit(txn=txn)

        txn = await tm.begin()
        folder1._p_jar = txn
        txn.delete(folder1)
        await tm.commit(txn=txn)

        await storage.vacuum()

        txn = await tm.begin()
        with pytest.raises(KeyError):
            await txn.get(folder1._p_oid)
        await tm.abort(txn=txn)
        with pytest.raises(KeyError):
            # dangling...
            await txn.get(item._p_oid)
        await tm.abort(txn=txn)
        with pytest.raises(KeyError):
            # dangling...
            await txn.get(folder2._p_oid)
        await tm.abort(txn=txn)

        await tm.abort(txn=txn)

async def test_do_not_cache_large_object(dummy_guillotina):
    tm = mocks.MockTransactionManager()
    storage = tm._storage
    txn = Transaction(tm)
    cache = MemoryCache(storage, txn)
    txn._cache = cache
    ob = create_content()
    ob.foobar = 'X' * cache.max_cache_record_size  # push size above cache threshold
    storage.store(ob)
    loaded = await txn.get(ob._p_oid)
    assert id(loaded) != id(ob)
    assert loaded._p_oid == ob._p_oid
    assert len(cache._actions) == 0

async def test_parse_metadata(dummy_guillotina):
    from guillotina.contrib.catalog.pg import PGSearchUtility

    util = PGSearchUtility()
    with mocks.MockTransaction():
        content = test_utils.create_content(Container)
        query = parse_query(content, {"_metadata": "foobar"})
        result = util.load_meatdata(query, {"foobar": "foobar", "blah": "blah"})
        assert result == {"foobar": "foobar"}

        query = parse_query(content, {"_metadata_not": "foobar"})
        result = util.load_meatdata(query, {"foobar": "foobar", "blah": "blah"})
        assert result == {"blah": "blah"}

async def test_deleting_parent_deletes_children(db, dummy_guillotina):
    aps = await get_aps(db)
    with TransactionManager(aps) as tm, await tm.begin() as txn:
        folder = create_content(Folder, 'Folder')
        txn.register(folder)
        ob = create_content()
        await folder.async_set('foobar', ob)
        assert len(txn.modified) == 2
        await tm.commit(txn=txn)

        txn = await tm.begin()
        ob2 = await txn.get(ob.__uuid__)
        folder2 = await txn.get(folder.__uuid__)
        assert ob2.__uuid__ == ob.__uuid__
        assert folder2.__uuid__ == folder.__uuid__

        # delete parent, children should be gone...
        txn.delete(folder2)
        assert len(txn.deleted) == 1
        await tm.commit(txn=txn)

        # give delete task a chance to execute
        await asyncio.sleep(0.1)

        txn = await tm.begin()
        with pytest.raises(KeyError):
            await txn.get(ob.__uuid__)
        with pytest.raises(KeyError):
            await txn.get(folder.__uuid__)
        await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def test_wait_for_lock(dummy_guillotina, etcd):
    strategy = _make_strategy()
    ob1 = utils.create_content()
    ob2 = utils.create_content()
    ob2._p_oid = ob1._p_oid
    await strategy.lock(ob1)

    result = []

    async def work_on_object_1():
        await asyncio.sleep(0.05)
        result.append(1)
        await strategy.unlock(ob1)

    async def attempt_to_lock_object_2():
        await strategy.lock(ob2)  # should wait for object 1 to get unlocked
        result.append(2)

    await asyncio.gather(attempt_to_lock_object_2(), work_on_object_1())
    assert result == [1, 2]

async def test_vacuum_objects(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db, autovacuum=False)
    tm = TransactionManager(aps)

    # create objects first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    ob2 = create_content()
    txn.register(ob1)
    txn.register(ob2)
    await tm.commit(txn=txn)

    txn = await tm.begin()
    txn.delete(ob1)
    assert len(txn.deleted) == 1
    await tm.commit(txn=txn)

    async with aps.pool.acquire() as conn:
        result = await conn.fetch(
            "select * from objects where zoid=$1;", ob1._p_oid)
    assert len(result) == 1  # dereferenced
    assert result[0]['parent_id'] == 'D' * 32

    await aps.vacuum()
    await asyncio.sleep(0.1)

    async with aps.pool.acquire() as conn:
        result = await conn.fetch(
            "select * from objects where zoid=$1;", ob1._p_oid)
    assert len(result) == 0

    await aps.remove()
    await cleanup(aps)

async def test_vacuum_cleans_orphaned_content(cockroach_storage):
    async with cockroach_storage as storage:
        tm = TransactionManager(storage)
        txn = await tm.begin()
        folder1 = create_content()
        txn.register(folder1)
        folder2 = create_content()
        folder2.__parent__ = folder1
        txn.register(folder2)
        item = create_content()
        item.__parent__ = folder2
        txn.register(item)
        await tm.commit(txn=txn)

        txn = await tm.begin()
        folder1.__txn__ = txn
        txn.delete(folder1)
        await tm.commit(txn=txn)

        await storage.vacuum()

        txn = await tm.begin()
        with pytest.raises(KeyError):
            await txn.get(folder1.__uuid__)
        await tm.abort(txn=txn)
        with pytest.raises(KeyError):
            # dangling...
            await txn.get(item.__uuid__)
        await tm.abort(txn=txn)
        with pytest.raises(KeyError):
            # dangling...
            await txn.get(folder2.__uuid__)
        await tm.abort(txn=txn)

        await tm.abort(txn=txn)

async def test_vacuum_objects(db, dummy_guillotina):
    aps = await get_aps(db, autovacuum=False)
    tm = TransactionManager(aps)

    # create objects first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    ob2 = create_content()
    txn.register(ob1)
    txn.register(ob2)

    with txn, tm:
        await tm.commit(txn=txn)

        txn = await tm.begin()
        txn.delete(ob1)
        assert len(txn.deleted) == 1
        await tm.commit(txn=txn)

        async with aps.pool.acquire() as conn:
            result = await conn.fetch("select * from objects where zoid=$1;", ob1.__uuid__)
        assert len(result) == 1  # dereferenced
        assert result[0]["parent_id"] == "D" * 32

        vacuumer = get_adapter(aps, IVacuumProvider)
        await vacuumer()
        await asyncio.sleep(0.1)

        async with aps.pool.acquire() as conn:
            result = await conn.fetch("select * from objects where zoid=$1;", ob1.__uuid__)
        assert len(result) == 0

    await aps.remove()
    await cleanup(aps)

async def test_vacuum_objects(db, dummy_request):
    request = dummy_request  # noqa so magically get_current_request can find
    aps = await get_aps(db, autovacuum=False)
    tm = TransactionManager(aps)

    # create objects first, commit it...
    txn = await tm.begin()
    ob1 = create_content()
    ob2 = create_content()
    txn.register(ob1)
    txn.register(ob2)
    await tm.commit(txn=txn)

    txn = await tm.begin()
    txn.delete(ob1)
    assert len(txn.deleted) == 1
    await tm.commit(txn=txn)

    async with aps.pool.acquire() as conn:
        result = await conn.fetch("select * from objects where zoid=$1;", ob1._p_oid)
    assert len(result) == 1  # dereferenced
    assert result[0]['parent_id'] == 'D' * 32

    await aps.vacuum()
    await asyncio.sleep(0.1)

    async with aps.pool.acquire() as conn:
        result = await conn.fetch("select * from objects where zoid=$1;", ob1._p_oid)
    assert len(result) == 0

    await aps.remove()
    await cleanup(aps)

async def test_cache_object_from_child(dummy_guillotina):
    tm = mocks.MockTransactionManager()
    storage = tm._storage
    txn = Transaction(tm)
    cache = MemoryCache(txn)
    txn._cache = cache

    ob = create_content()
    parent = create_content()
    ob.__parent__ = parent
    storage.store(parent)
    storage.store(ob)

    loaded = await txn.get_child(parent, ob.id)
    assert len(cache._actions) == 1
    assert cache._actions[0]['action'] == 'stored'
    assert cache._hits == 0

    loaded = await txn.get_child(parent, ob.id)
    assert cache._actions[-1]['action'] == 'loaded'
    assert cache._hits == 1

    assert id(loaded) != id(ob)
    assert loaded._p_oid == ob._p_oid

async def run3():
    lookup_registry = {ISecurityInfo: DefaultSecurityInfoAdapter}
    ob = test_utils.create_content()
    print("Test manual lookup")
    type_ = type(ob)
    start = time.time()
    for _ in range(ITERATIONS):
        for interface in type_.__implemented__.flattened():  # this returns in correct order
            if interface in lookup_registry:
                DefaultSecurityInfoAdapter(ob)
    end = time.time()
    print(f"Done with {ITERATIONS} in {end - start} seconds")

async def test_patch_dict_field_normal_patch(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_dict': {
                'foo': 'bar'
            }
        }, [])
    assert len(content.patch_dict) == 1

def test_parser_term_and_terms(dummy_guillotina):
    content = test_utils.create_content()
    parser = Parser(None, content)
    params = {"depth__gte": "2", "type_name": "Item"}
    query = parser(params)
    qq = query["query"]["bool"]["must"]
    assert "_from" not in query
    assert qq[1]["term"]["type_name"] == "Item"

    params = {"depth__gte": "2", "type_name": ["Item", "Folder"]}
    query = parser(params)
    qq = query["query"]["bool"]["must"]
    assert "Item" in qq[1]["terms"]["type_name"]
    assert "Folder" in qq[1]["terms"]["type_name"]

async def test_pg_field_parser(dummy_guillotina):
    from guillotina.contrib.catalog.pg import Parser
    content = test_utils.create_content(Container)
    parser = Parser(None, content)

    # test convert operators
    for q1, q2 in (('gte', '>='), ('gt', '>'), ('eq', '='),
                   ('lte', '<='), ('not', '!='), ('lt', '<')):
        where, value, select = parser.process_queried_field(
            f'depth__{q1}', '2')
        assert f' {q2} ' in where
        assert value == [2]
        # bad int
        assert parser.process_queried_field(f'depth__{q1}', 'foobar') is None

    # convert bool
    where, value, select = parser.process_queried_field(
        f'boolean_field', 'true')
    assert value == [True]
    where, value, select = parser.process_queried_field(
        f'boolean_field', 'false')
    assert value == [False]

    # none for invalid
    assert parser.process_queried_field(f'foobar', None) is None

    # convert to list
    where, value, select = parser.process_queried_field(f'tags__in', 'foo,bar')
    assert value == [['foo', 'bar']]
    assert ' ?| ' in where

    where, value, select = parser.process_queried_field(f'tags', 'bar')
    assert ' ? ' in where

    where, value, select = parser.process_queried_field(
        f'tags', ['foo', 'bar'])
    assert ' ?| ' in where

    # date parsing
    where, value, select = parser.process_queried_field(
        f'creation_date__gte', '2019-06-15T18:37:31.008359+00:00')
    assert isinstance(value[0], datetime)

    # path
    where, value, select = parser.process_queried_field(f'path', '/foo/bar')
    assert 'substring(json->>' in where

    # ft
    where, value, select = parser.process_queried_field(f'title', 'foobar')
    assert 'to_tsvector' in where

async def test_constraint_error(dummy_request, mock_txn):
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)

    errors = []
    await deserializer.set_schema(ITestValidation, content, {"constrained": "foobar"}, errors)
    assert len(errors) == 1

    errors = []
    await deserializer.set_schema(ITestValidation, content, {"constrained": "not foobar"}, errors)
    assert len(errors) == 0

async def test_record_transaction_cache_hit_get_child_root(
        self, dummy_guillotina, metrics_registry):
    storage = AsyncMock()
    mng = TransactionManager(storage)
    cache = AsyncMock()
    cache.get.return_value = {
        "state": pickle.dumps(create_content()),
        "zoid": "foobar",
        "tid": 1,
        "id": "foobar",
    }
    strategy = AsyncMock()
    txn = Transaction(mng, cache=cache, strategy=strategy)
    ob = create_content(Container)
    await txn.get_child(ob, "foobar")

    assert (
        metrics_registry.get_sample_value(
            "guillotina_cache_ops_total", {"type": "_get_child", "result": "hit_roots"}
        )
        == 1.0
    )

async def test_unhandled_exceptions_in_bucket_dict_field_do_not_write_to_object(
        dummy_request, mock_txn):
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {"bucket_dict": {"op": "assign", "value": None}}, errors)
    assert not hasattr(content, "bucket_dict")
    assert len(errors) == 1

async def test_store_file_in_cloud(dummy_request, mock_txn):
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()

        ob = create_content()
        ob.file = None
        mng = FileManager(ob, dummy_request, IContent["file"].bind(ob))
        await mng.upload()
        assert getattr(ob.file, "upload_file_id", None) is None
        assert ob.file.uri is not None
        assert ob.file.content_type == "image/gif"
        assert ob.file.filename == "test.gif"
        assert ob.file._size == len(_test_gif)
        assert ob.file.md5 is not None

        assert len(await get_all_objects()) == 1
        gmng = GCloudFileManager(ob, dummy_request, IContent["file"].bind(ob))
        await gmng.delete_upload(ob.file.uri)
        assert len(await get_all_objects()) == 0

async def test_iterate_storage(dummy_request, mock_txn):
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()

        for _ in range(20):
            dummy_request.content.seek(0)
            dummy_request._cache_data = b""
            dummy_request._last_read_pos = 0
            ob = create_content()
            ob.file = None
            mng = FileManager(ob, dummy_request, IContent["file"].bind(ob))
            await mng.upload()

        util = get_utility(IGCloudBlobStore)
        count = 0
        async for item in util.iterate_bucket():  # noqa
            count += 1
        assert count == 20
        await _cleanup()

async def test_deleting_parent_deletes_children(cockroach_storage):
    async with cockroach_storage as storage:
        tm = TransactionManager(storage)
        txn = await tm.begin()

        folder = create_content(Folder, "Folder")
        txn.register(folder)
        ob = create_content()
        await folder.async_set("foobar", ob)
        assert len(txn.modified) == 2
        await tm.commit(txn=txn)

        txn = await tm.begin()
        ob2 = await txn.get(ob.__uuid__)
        folder2 = await txn.get(folder.__uuid__)
        assert ob2.__uuid__ == ob.__uuid__
        assert folder2.__uuid__ == folder.__uuid__

        # delete parent, children should be gone...
        txn.delete(folder2)
        assert len(txn.deleted) == 1
        await tm.commit(txn=txn)

        # give delete task a chance to execute
        await asyncio.sleep(0.1)

        txn = await tm.begin()
        with pytest.raises(KeyError):
            await txn.get(ob.__uuid__)
        with pytest.raises(KeyError):
            await txn.get(folder.__uuid__)
        await tm.abort(txn=txn)

async def test_store_file_in_cloud_using_tus(own_dummy_request, mock_txn):
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update(
            {
                "Content-Type": "image/gif",
                "UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
                "UPLOAD-EXTENSION": "gif",
                "UPLOAD-FILENAME": "test.gif",
                "UPLOAD-LENGTH": len(_test_gif),
                "TUS-RESUMABLE": "1.0.0",
                "Content-Length": len(_test_gif),
                "upload-offset": 0,
            }
        )
        own_dummy_request._payload = FakeContentReader()

        ob = create_content()
        ob.file = None
        mng = FileManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await mng.tus_create()
        await mng.tus_patch()
        assert ob.file._upload_file_id is None
        assert ob.file.uri is not None
        assert ob.file.content_type == "image/gif"
        assert ob.file.filename == "test.gif"
        assert ob.file._size == len(_test_gif)

        assert len(await get_all_objects()) == 1
        gmng = S3FileStorageManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await gmng.delete_upload(ob.file.uri)
        assert len(await get_all_objects()) == 0

async def test_read_range(dummy_request, mock_txn):
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()

        ob = create_content()
        ob.file = None
        mng = FileManager(ob, dummy_request, IContent["file"].bind(ob))
        await mng.upload()
        assert getattr(ob.file, "upload_file_id", None) is None
        assert ob.file.uri is not None

        assert len(await get_all_objects()) == 1
        gmng = GCloudFileManager(ob, dummy_request, IContent["file"].bind(ob))

        async for chunk in gmng.read_range(0, 100):
            assert len(chunk) == 100
            assert chunk == _test_gif[:100]

        async for chunk in gmng.read_range(100, 200):
            assert len(chunk) == 100
            assert chunk == _test_gif[100:200]

async def test_get_total_resources_of_type(db, dummy_guillotina):
    aps = await get_aps(db)
    with TransactionManager(aps) as tm, await tm.begin() as txn:
        ob = create_content()
        txn.register(ob)
        await tm.commit(txn=txn)

        txn = await tm.begin()
        assert 1 == await txn.get_total_resources_of_type('Item')
        await tm.abort(txn=txn)

    await aps.remove()
    await cleanup(aps)

async def run3():
    lookup_registry = {
        ISecurityInfo: DefaultSecurityInfoAdapter
    }
    ob = test_utils.create_content()
    print('Test manual lookup')
    type_ = type(ob)
    start = time.time()
    for _ in range(ITERATIONS):
        for interface in type_.__implemented__.flattened():  # this returns in correct order
            if interface in lookup_registry:
                DefaultSecurityInfoAdapter(ob)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')

async def test_deserialize_cloud_file(dummy_request):
    from guillotina.test_package import IFileContent, FileContent
    with get_tm() as tm, await tm.begin() as txn, dummy_request:
        obj = create_content(FileContent)
        obj.__txn__ = txn
        obj.file = None
        await get_adapter(
            IFileContent['file'].bind(obj), IJSONToValue,
            args=[
                'data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7',
                obj
            ])
        assert isinstance(obj.file, DBFile)
        assert obj.file.size == 42

async def test_patch_int_field_invalid_type(dummy_request):
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)

    for op in ('inc', 'dec', 'reset'):
        errors = []
        await deserializer.set_schema(
            ITestSchema, content, {'patch_int': {'op': op, 'value': 3.3}}, errors)
        assert getattr(content, 'patch_int', 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]['error'], WrongType)

async def test_patch_list_field_invalid_type(dummy_request):
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)

    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {'patch_list': {'op': 'append', 'value': 1}}, errors)
    assert len(getattr(content, 'patch_list', [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], ValueDeserializationError)

async def test_invalidate_object(guillotina_main, loop):
    util = get_utility(ICacheUtility)
    trns = mocks.MockTransaction(mocks.MockTransactionManager())
    trns.added = trns.deleted = {}
    content = create_content()
    trns.modified = {content.__uuid__: content}
    rcache = BasicCache(trns)
    await rcache.clear()

    await rcache.set("foobar", oid=content.__uuid__)
    assert util._memory_cache.get("root-" + content.__uuid__) == "foobar"
    assert await rcache.get(oid=content.__uuid__) == "foobar"
    await rcache.close(invalidate=True)
    assert await rcache.get(oid=content.__uuid__) is None

async def test_parse_bbb_plone(dummy_guillotina):
    from guillotina.catalog.parser import BaseParser

    content = test_utils.create_content(Container)
    parser = BaseParser(None, content)
    result = parser(
        {"portal_type": "Folder", "SearchableText": "foobar", "b_size": 45, "b_start": 50, "path.depth": 2}
    )
    assert "searchabletext__or" in result["params"]
    assert "title__in" in result["params"]["searchabletext__or"]
    assert "depth" in result["params"]
    assert "type_name" in result["params"]
    assert "portal_type" not in result["params"]
    assert result["_from"] == 50
    assert result["size"] == 45

async def test_patch_list_field_invalid_type(dummy_request, mock_txn):
    login()
    content = create_content()
    deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson)

    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {"patch_list": {"op": "append", "value": 1}}, errors)
    assert len(getattr(content, "patch_list", [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]["error"], ValueDeserializationError)

async def test_patch_int_field_invalid_type(dummy_request): login() content = create_content() deserializer = get_multi_adapter((content, dummy_request), IResourceDeserializeFromJson) for op in ("inc", "dec", "reset"): errors = [] await deserializer.set_schema(ITestSchema, content, {"patch_int": { "op": op, "value": 3.3 }}, errors) assert getattr(content, "patch_int", 0) == 0 assert len(errors) == 1 assert isinstance(errors[0]["error"], WrongType)
async def test_deserialize_cloud_file(dummy_request, mock_txn):
    from guillotina.test_package import IFileContent, FileContent

    obj = create_content(FileContent)
    obj.file = None
    await get_adapter(
        IFileContent["file"].bind(obj),
        IJSONToValue,
        args=[
            "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7",
            obj
        ],
    )
    assert isinstance(obj.file, DBFile)
    assert obj.file.size == 42

async def test_pg_field_parser(dummy_guillotina):
    from guillotina.contrib.catalog.pg import Parser
    content = test_utils.create_content(Container)
    parser = Parser(None, content)

    # test convert operators
    for q1, q2 in (("gte", ">="), ("gt", ">"), ("eq", "="), ("lte", "<="), ("not", "!="), ("lt", "<")):
        where, value, select = parser.process_queried_field(f"depth__{q1}", "2")
        assert f" {q2} " in where
        assert value == [2]
        # bad int
        assert parser.process_queried_field(f"depth__{q1}", "foobar") is None

    # convert bool
    where, value, select = parser.process_queried_field(f"boolean_field", "true")
    assert value == [True]
    where, value, select = parser.process_queried_field(f"boolean_field", "false")
    assert value == [False]

    # none for invalid
    assert parser.process_queried_field(f"foobar", None) is None

    # convert to list
    where, value, select = parser.process_queried_field(f"tags__in", "foo,bar")
    assert value == [["foo", "bar"]]
    assert " ?| " in where

    where, value, select = parser.process_queried_field(f"tags", "bar")
    assert " ? " in where

    where, value, select = parser.process_queried_field(f"tags", ["foo", "bar"])
    assert " ?| " in where

    # date parsing
    where, value, select = parser.process_queried_field(
        f"creation_date__gte", "2019-06-15T18:37:31.008359+00:00"
    )
    assert isinstance(value[0], datetime)

    # path
    where, value, select = parser.process_queried_field(f"path", "/foo/bar")
    assert "substring(json->>" in where

    # ft
    where, value, select = parser.process_queried_field(f"title", "foobar")
    assert "to_tsvector" in where

async def test_deserialize_cloud_file(dummy_request):
    from guillotina.test_package import IFileContent, FileContent
    request = dummy_request  # noqa
    tm = dummy_request._tm
    txn = await tm.begin(dummy_request)
    obj = create_content(FileContent)
    obj._p_jar = txn
    obj.file = None
    await get_adapter(
        IFileContent['file'].bind(obj), IJSONToValue,
        args=[
            'data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7',
            obj
        ])
    assert isinstance(obj.file, DBFile)
    assert obj.file.size == 42

async def test_patch_list_field_invalid_type(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    errors = []
    await deserializer.set_schema(
        ITestSchema, content, {
            'patch_list': {
                'op': 'append',
                'value': 1
            }
        }, errors)
    assert len(getattr(content, 'patch_list', [])) == 0
    assert len(errors) == 1
    assert isinstance(errors[0]['error'], ValueDeserializationError)

async def test_patch_int_field_invalid_type(dummy_request):
    request = dummy_request  # noqa
    login(request)
    content = create_content()
    deserializer = get_multi_adapter(
        (content, request), IResourceDeserializeFromJson)
    for op in ('inc', 'dec', 'reset'):
        errors = []
        await deserializer.set_schema(
            ITestSchema, content, {
                'patch_int': {
                    'op': op,
                    'value': 3.3
                }
            }, errors)
        assert getattr(content, 'patch_int', 0) == 0
        assert len(errors) == 1
        assert isinstance(errors[0]['error'], WrongType)