def delete(key, options):
    """Delete an entity from Datastore.

    Deleting an entity that doesn't exist does not result in an error. The
    result is the same regardless.

    Args:
        key (datastore.Key): The key for the entity to be deleted.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Will be finished when entity is deleted. Result will
            always be :data:`None`.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(key, options)
    use_datastore = context._use_datastore(key, options)
    transaction = _get_transaction(options)

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)

    if use_datastore:
        if use_global_cache:
            # Lock the cache entry so a concurrent read can't repopulate it
            # with stale data while the datastore delete is in flight.
            yield _cache.global_lock(cache_key)

        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        yield batch.delete(key)

    if use_global_cache:
        if transaction:
            # Inside a transaction the datastore delete isn't applied until
            # commit, so defer clearing the cache entry until the transaction
            # machinery flushes `global_cache_flush_keys` after a successful
            # commit. Clearing it now would let a concurrent read repopulate
            # the cache with the not-yet-deleted entity. This mirrors the
            # handling in `put`.
            context.global_cache_flush_keys.add(cache_key)
        else:
            yield _cache.global_delete(cache_key)
def put(entity, options):
    """Store an entity in datastore.

    The entity can be a new entity to be saved for the first time or an
    existing entity that has been updated.

    Args:
        entity (datastore.Entity): The entity to be stored.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Result will be completed datastore key
            (datastore.Key) for the entity.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(entity.key, options)
    use_datastore = context._use_datastore(entity.key, options)
    if not (use_global_cache or use_datastore):
        raise TypeError(
            "use_global_cache and use_datastore can't both be False")

    # A partial (incomplete) key can only be completed by Datastore, so a
    # cache-only write has nothing usable to key the cache entry on.
    if not use_datastore and entity.key.is_partial:
        raise TypeError("Can't store partial keys when use_datastore is False")

    entity_pb = helpers.entity_to_protobuf(entity)
    cache_key = _cache.global_cache_key(entity.key)
    if use_global_cache and not entity.key.is_partial:
        if use_datastore:
            # Datastore is authoritative: lock the cache entry so concurrent
            # reads don't repopulate it while the write is in flight.
            yield _cache.global_lock(cache_key)
        else:
            # Cache-only write: serialize the entity straight into the cache.
            expires = context._global_cache_timeout(entity.key, options)
            cache_value = entity_pb.SerializeToString()
            yield _cache.global_set(cache_key, cache_value, expires=expires)

    if use_datastore:
        transaction = context.transaction
        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        # For a partial key, Datastore assigns an id and returns the
        # completed key; for a complete key no key is echoed back.
        key_pb = yield batch.put(entity_pb)
        if key_pb:
            key = helpers.key_from_protobuf(key_pb)
        else:
            key = None

        if use_global_cache:
            if transaction:
                # The write isn't applied until commit, so defer the cache
                # flush; the transaction machinery clears these keys after a
                # successful commit.
                context.global_cache_flush_keys.add(cache_key)
            else:
                yield _cache.global_delete(cache_key)

        # NOTE(review): when `use_datastore` is False the tasklet falls off
        # the end and the future resolves to None rather than a key —
        # confirm callers of cache-only puts expect this.
        raise tasklets.Return(key)
def test_global_delete(_batch, _global_cache):
    """global_delete enqueues the key on a delete batch and yields its result."""
    _global_cache.return_value = mock.Mock(
        transient_errors=(),
        strict_write=False,
        spec=("transient_errors", "strict_write"),
    )
    delete_batch = _batch.get_batch.return_value
    delete_batch.add.return_value = _future_result("hi mom!")

    assert _cache.global_delete(b"key").result() == "hi mom!"

    _batch.get_batch.assert_called_once_with(_cache._GlobalCacheDeleteBatch)
    delete_batch.add.assert_called_once_with(b"key")
def _clear_global_cache(self):
    """Clears the global cache.

    Clears keys from the global cache that appear in the local context
    cache. In this way, only keys that were touched in the current context
    are affected.
    """
    cache_keys = []
    for context_key in self.cache:
        if self._use_global_cache(context_key):
            cache_keys.append(_cache.global_cache_key(context_key._key))

    if cache_keys:
        yield [_cache.global_delete(cache_key) for cache_key in cache_keys]
def _clear_global_cache(self):
    """Clears the global cache.

    Clears keys from the global cache that appear in the local context
    cache. In this way, only keys that were touched in the current context
    are affected.
    """
    # Prevent circular import in Python 2.7
    from google.cloud.ndb import _cache

    # Iterating the mapping directly yields its keys; no need to call
    # `.keys()` (and this matches the sibling implementation).
    keys = [
        _cache.global_cache_key(key._key)
        for key in self.cache
        if self._use_global_cache(key)
    ]
    if keys:
        yield [_cache.global_delete(key) for key in keys]
def test_global_delete(_batch):
    """global_delete delegates the key to a _GlobalCacheDeleteBatch."""
    delete_batch = _batch.get_batch.return_value

    result = _cache.global_delete(b"key")

    assert result is delete_batch.add.return_value
    _batch.get_batch.assert_called_once_with(_cache._GlobalCacheDeleteBatch)
    delete_batch.add.assert_called_once_with(b"key")
def _transaction_async(context, callback, read_only=False):
    """Run `callback` inside a new Datastore transaction.

    Begins a transaction, runs `callback` in a child context bound to that
    transaction, commits on success, and rolls back on any exception.

    Args:
        context (context_module.Context): The current context; a child
            context bound to the transaction is derived from it.
        callback (Callable): The work to perform inside the transaction. May
            return a `tasklets.Future`, which will be waited on.
        read_only (bool): Whether to begin a read-only transaction.

    Returns:
        tasklets.Future: Result is the value returned by `callback`.
    """
    # Avoid circular import in Python 2.7
    from google.cloud.ndb import _cache
    from google.cloud.ndb import _datastore_api

    # Start the transaction
    utils.logging_debug(log, "Start transaction")
    transaction_id = yield _datastore_api.begin_transaction(read_only, retries=0)
    utils.logging_debug(log, "Transaction Id: {}", transaction_id)

    on_commit_callbacks = []
    tx_context = context.new(
        transaction=transaction_id,
        on_commit_callbacks=on_commit_callbacks,
        batches=None,
        commit_batches=None,
        cache=None,
        # We could just pass `None` here and let the `Context` constructor
        # instantiate a new event loop, but our unit tests inject a subclass of
        # `EventLoop` that makes testing a little easier. This makes sure the
        # new event loop is of the same type as the current one, to propagate
        # the event loop class used for testing.
        eventloop=type(context.eventloop)(),
        retry=context.get_retry_state(),
    )

    # The outer loop is dependent on the inner loop: keep draining the
    # transaction context's event loop from the outer loop's idle hook.
    def run_inner_loop(inner_context):
        with inner_context.use():
            if inner_context.eventloop.run1():
                return True  # schedule again

    context.eventloop.add_idle(run_inner_loop, tx_context)

    # Cache keys for entities written during the transaction; flushed from
    # the global cache only after a successful commit.
    tx_context.global_cache_flush_keys = flush_keys = set()

    with tx_context.use():
        try:
            # Run the callback
            result = callback()
            if isinstance(result, tasklets.Future):
                result = yield result

            # Make sure we've run everything we can run before calling commit
            _datastore_api.prepare_to_commit(transaction_id)
            tx_context.eventloop.run()

            # Commit the transaction
            yield _datastore_api.commit(transaction_id, retries=0)

        # Rollback if there is an error
        except Exception as e:  # noqa: E722
            # Discard anything cached during the failed transaction before
            # rolling back and re-raising.
            tx_context.cache.clear()
            yield _datastore_api.rollback(transaction_id)
            raise e

        # Flush keys of entities written during the transaction from the
        # global cache
        if flush_keys:
            yield [_cache.global_delete(key) for key in flush_keys]

        for callback in on_commit_callbacks:
            callback()

        raise tasklets.Return(result)