def test_get_and_validate_invalid():
    cache = _cache.ContextCache()
    test_entity = mock.Mock(_key="test")
    cache["test"] = test_entity
    # Mutating the entity's key after caching makes the cached entry stale.
    test_entity._key = "changed_key"
    with pytest.raises(KeyError):
        cache.get_and_validate("test")
def __new__(
    cls,
    client,
    eventloop=None,
    stub=None,
    batches=None,
    commit_batches=None,
    transaction=None,
    cache=None,
    cache_policy=None,
    global_cache=None,
    global_cache_policy=None,
    global_cache_timeout_policy=None,
    datastore_policy=None,
    on_commit_callbacks=None,
    legacy_data=True,
):
    # Prevent circular import in Python 2.7
    from google.cloud.ndb import _cache
    from google.cloud.ndb import _datastore_api

    if eventloop is None:
        eventloop = _eventloop.EventLoop()

    if stub is None:
        stub = _datastore_api.make_stub(client)

    if batches is None:
        batches = {}

    if commit_batches is None:
        commit_batches = {}

    # Create a cache and, if an existing cache was passed into this
    # method, duplicate its entries.
    new_cache = _cache.ContextCache()
    if cache:
        new_cache.update(cache)

    context = super(_Context, cls).__new__(
        cls,
        client=client,
        eventloop=eventloop,
        stub=stub,
        batches=batches,
        commit_batches=commit_batches,
        transaction=transaction,
        cache=new_cache,
        global_cache=global_cache,
        on_commit_callbacks=on_commit_callbacks,
        legacy_data=legacy_data,
    )

    context.set_cache_policy(cache_policy)
    context.set_global_cache_policy(global_cache_policy)
    context.set_global_cache_timeout_policy(global_cache_timeout_policy)
    context.set_datastore_policy(datastore_policy)

    return context
def test_get_and_validate_miss():
    cache = _cache.ContextCache()
    with pytest.raises(KeyError):
        cache.get_and_validate("nonexistent_key")
def test_get_and_validate_none():
    cache = _cache.ContextCache()
    cache["test"] = None
    assert cache.get_and_validate("test") is None
def test_get_and_validate_valid():
    cache = _cache.ContextCache()
    test_entity = mock.Mock(_key="test")
    cache["test"] = test_entity
    assert cache.get_and_validate("test") is test_entity
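# A minimal sketch of the ContextCache.get_and_validate behavior that the
# tests above exercise, assuming cached entities expose their current key as
# ``_key`` (illustrative only, not necessarily the library's exact code):
def get_and_validate(self, key):
    """Return the cached value for ``key``, treating a stale entity as a miss.

    ``None`` is a legitimate cached value meaning "entity does not exist".
    """
    entity = self[key]  # Raises KeyError on an ordinary cache miss.
    if entity is None or entity._key == key:
        return entity
    # The entity's key changed after it was cached; treat the entry as invalid.
    raise KeyError(key)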
def test___repr__():
    cache = _cache.ContextCache()
    cache["hello dad"] = "i'm in jail"
    assert repr(cache) == "ContextCache()"
def __new__(
    cls,
    client,
    id=None,
    namespace=key_module.UNDEFINED,
    eventloop=None,
    batches=None,
    commit_batches=None,
    transaction=None,
    cache=None,
    cache_policy=None,
    global_cache=None,
    global_cache_policy=None,
    global_cache_timeout_policy=None,
    datastore_policy=None,
    on_commit_callbacks=None,
    transaction_complete_callbacks=None,
    legacy_data=True,
    retry=None,
    rpc_time=None,
    wait_time=None,
):
    # Prevent circular import in Python 2.7
    from google.cloud.ndb import _cache

    if id is None:
        id = next(_context_ids)

    if eventloop is None:
        eventloop = _eventloop.EventLoop()

    if batches is None:
        batches = {}

    if commit_batches is None:
        commit_batches = {}

    # Create a cache and, if an existing cache was passed into this
    # method, duplicate its entries.
    new_cache = _cache.ContextCache()
    if cache:
        new_cache.update(cache)

    context = super(_Context, cls).__new__(
        cls,
        id=id,
        client=client,
        namespace=namespace,
        eventloop=eventloop,
        batches=batches,
        commit_batches=commit_batches,
        transaction=transaction,
        cache=new_cache,
        global_cache=global_cache,
        on_commit_callbacks=on_commit_callbacks,
        transaction_complete_callbacks=transaction_complete_callbacks,
        legacy_data=legacy_data,
    )

    context.set_cache_policy(cache_policy)
    context.set_global_cache_policy(global_cache_policy)
    context.set_global_cache_timeout_policy(global_cache_timeout_policy)
    context.set_datastore_policy(datastore_policy)
    context.set_retry_state(retry)

    return context