def test_global_lock(_batch):
    """global_lock should enqueue a locked-marker write via a set batch."""
    set_batch = _batch.get_batch.return_value

    lock_future = _cache.global_lock(b"key")

    # The lock future is exactly what the batch's add() handed back.
    assert lock_future is set_batch.add.return_value
    _batch.get_batch.assert_called_once_with(
        _cache._GlobalCacheSetBatch, {"expires": _cache._LOCK_TIME}
    )
    set_batch.add.assert_called_once_with(b"key", _cache._LOCKED)
def delete(key, options):
    """Delete an entity from Datastore.

    Deleting an entity that doesn't exist does not result in an error. The
    result is the same regardless.

    Args:
        key (datastore.Key): The key for the entity to be deleted.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Will be finished when entity is deleted. Result will
            always be :data:`None`.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(key, options)
    use_datastore = context._use_datastore(key, options)

    cache_key = _cache.global_cache_key(key) if use_global_cache else None

    if use_datastore:
        # Lock the cache entry first so concurrent readers can't repopulate
        # it with the entity we're about to delete.
        if use_global_cache:
            yield _cache.global_lock(cache_key)

        transaction = _get_transaction(options)
        batch = (
            _get_commit_batch(transaction, options)
            if transaction
            else _batch.get_batch(_NonTransactionalCommitBatch, options)
        )
        yield batch.delete(key)

    if use_global_cache:
        yield _cache.global_delete(cache_key)
def lookup(key, options):
    """Look up a Datastore entity.

    Gets an entity from Datastore, asynchronously. Checks the global cache,
    first, if appropriate. Uses batching.

    Args:
        key (~datastore.Key): The key for the entity to retrieve.
        options (_options.ReadOptions): The options for the request. For
            example, ``{"read_consistency": EVENTUAL}``.

    Returns:
        :class:`~tasklets.Future`: If not an exception, future's result will
            be either an entity protocol buffer or _NOT_FOUND.
    """
    context = context_module.get_context()
    use_datastore = context._use_datastore(key, options)
    # Transactional reads bypass the global cache so they only see
    # transaction-consistent data.
    if use_datastore and options.transaction:
        use_global_cache = False
    else:
        use_global_cache = context._use_global_cache(key, options)

    if not (use_global_cache or use_datastore):
        raise TypeError(
            "use_global_cache and use_datastore can't both be False")

    entity_pb = _NOT_FOUND
    key_locked = False

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)
        result = yield _cache.global_get(cache_key)
        # A lock value means another writer is currently mutating this
        # entity; skip both using and updating the cache in that case.
        key_locked = _cache.is_locked_value(result)
        if not key_locked:
            if result is not None:
                # Cache hit: deserialize the stored entity protobuf.
                entity_pb = entity_pb2.Entity()
                entity_pb.MergeFromString(result)
            elif use_datastore:
                # Cache miss: take a read lock and watch the key so the
                # compare-and-swap below only succeeds if nothing changed
                # the entry in the meantime.
                yield _cache.global_lock(cache_key, read=True)
                yield _cache.global_watch(cache_key)

    if entity_pb is _NOT_FOUND and use_datastore:
        batch = _batch.get_batch(_LookupBatch, options)
        entity_pb = yield batch.add(key)

    # Do not cache misses
    if use_global_cache and not key_locked:
        if entity_pb is not _NOT_FOUND:
            expires = context._global_cache_timeout(key, options)
            serialized = entity_pb.SerializeToString()
            # Only writes back if the watched entry wasn't touched by a
            # concurrent writer.
            yield _cache.global_compare_and_swap(
                cache_key, serialized, expires=expires)
        else:
            # Nothing to store; release the watch taken above.
            yield _cache.global_unwatch(cache_key)

    raise tasklets.Return(entity_pb)
def put(entity, options):
    """Store an entity in datastore.

    The entity can be a new entity to be saved for the first time or an
    existing entity that has been updated.

    Args:
        entity (datastore.Entity): The entity to be stored.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Result will be completed datastore key
            (datastore.Key) for the entity.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(entity.key, options)
    use_datastore = context._use_datastore(entity.key, options)
    if not (use_global_cache or use_datastore):
        raise TypeError(
            "use_global_cache and use_datastore can't both be False")

    # A partial key can only be completed by Datastore, so a cache-only
    # write of a partial key is impossible.
    if not use_datastore and entity.key.is_partial:
        raise TypeError("Can't store partial keys when use_datastore is False")

    entity_pb = helpers.entity_to_protobuf(entity)
    cache_key = _cache.global_cache_key(entity.key)
    if use_global_cache and not entity.key.is_partial:
        if use_datastore:
            # Datastore is authoritative: lock the cache entry so readers
            # can't repopulate it with a stale value during the write.
            yield _cache.global_lock(cache_key)
        else:
            # Cache-only mode: store the serialized entity directly.
            expires = context._global_cache_timeout(entity.key, options)
            cache_value = entity_pb.SerializeToString()
            yield _cache.global_set(cache_key, cache_value, expires=expires)

    if use_datastore:
        transaction = context.transaction
        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        key_pb = yield batch.put(entity_pb)
        if key_pb:
            key = helpers.key_from_protobuf(key_pb)
        else:
            key = None

        if use_global_cache:
            if transaction:
                # Defer cache invalidation until the transaction commits.
                context.global_cache_flush_keys.add(cache_key)
            else:
                yield _cache.global_delete(cache_key)

        # NOTE(review): when use_datastore is False the tasklet falls
        # through and implicitly returns None — confirm that is intended.
        raise tasklets.Return(key)
def test_global_lock(_batch, _global_cache):
    """global_lock should resolve to the set batch's add() result."""
    set_batch = _batch.get_batch.return_value
    set_batch.add.return_value = _future_result("hi mom!")
    _global_cache.return_value = mock.Mock(
        transient_errors=(),
        clear_cache_soon=False,
        spec=("transient_errors", "clear_cache_soon"),
    )

    lock_future = _cache.global_lock(b"key")

    assert lock_future.result() == "hi mom!"
    _batch.get_batch.assert_called_once_with(
        _cache._GlobalCacheSetBatch, {"expires": _cache._LOCK_TIME}
    )
    set_batch.add.assert_called_once_with(b"key", _cache._LOCKED)