def test_it(self):
    """Batches are cached per (type, options) and flushed by the idle hook."""
    opts = {"foo": "bar"}
    batch = _batch.get_batch(MockBatch, opts)
    assert batch.options is opts
    assert not batch.idle_called

    # A different options dict, or no options at all, yields a new batch.
    other_opts = {"food": "barn"}
    assert _batch.get_batch(MockBatch, other_opts) is not batch
    assert _batch.get_batch(MockBatch) is not batch

    # The same options return the cached batch instance.
    assert _batch.get_batch(MockBatch, opts) is batch

    # Running the event loop fires the batch's idle callback.
    _eventloop.run()
    assert batch.idle_called
def delete(key, options):
    """Delete an entity from Datastore.

    Deleting an entity that doesn't exist does not result in an error. The
    result is the same regardless.

    Args:
        key (datastore.Key): The key for the entity to be deleted.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Will be finished when entity is deleted. Result will
            always be :data:`None`.
    """
    context = context_module.get_context()
    in_global_cache = context._use_global_cache(key, options)
    in_datastore = context._use_datastore(key, options)

    if in_global_cache:
        cache_key = _cache.global_cache_key(key)

    if in_datastore:
        # Lock the cache entry first so concurrent reads don't cache a value
        # we are about to delete.
        if in_global_cache:
            yield _cache.global_lock(cache_key)

        transaction = _get_transaction(options)
        batch = (
            _get_commit_batch(transaction, options)
            if transaction
            else _batch.get_batch(_NonTransactionalCommitBatch, options)
        )
        yield batch.delete(key)

    if in_global_cache:
        yield _cache.global_delete(cache_key)
def global_set_if_not_exists(key, value, expires=None):
    """Store entity in the global cache if key is not already present.

    Args:
        key (bytes): The key to save.
        value (bytes): The entity to save.
        expires (Optional[float]): Number of seconds until value expires.

    Returns:
        tasklets.Future: Eventual result will be a ``bool`` value which will
            be :data:`True` if a new value was set for the key, or
            :data:`False` if a value was already set for the key or if a
            transient error occurred while attempting to set the key.
    """
    batch_options = {"expires": expires} if expires else {}
    cache = _global_cache()
    batch = _batch.get_batch(_GlobalCacheSetIfNotExistsBatch, batch_options)
    try:
        stored = yield batch.add(key, value)
    except cache.transient_errors:
        # Best-effort: treat a transient cache failure as "not stored".
        stored = False
    raise tasklets.Return(stored)
def put(entity, options):
    """Store an entity in datastore.

    The entity can be a new entity to be saved for the first time or an
    existing entity that has been updated.

    Args:
        entity (datastore.Entity): The entity to be stored.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Result will be completed datastore key
            (datastore.Key) for the entity.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(entity.key, options)
    use_datastore = context._use_datastore(entity.key, options)
    if not (use_global_cache or use_datastore):
        raise TypeError("use_global_cache and use_datastore can't both be False")

    if not use_datastore and entity.key.is_partial:
        # A cache-only write can't complete a partial key (no id allocation).
        raise TypeError("Can't store partial keys when use_datastore is False")

    lock = None
    entity_pb = helpers.entity_to_protobuf(entity)
    cache_key = _cache.global_cache_key(entity.key)
    if use_global_cache and not entity.key.is_partial:
        if use_datastore:
            # Writing to both: lock the cache entry so readers don't cache a
            # stale value while the datastore write is in flight. Unlocked
            # after the write (or when the transaction completes) below.
            lock = yield _cache.global_lock_for_write(cache_key)
        else:
            # Cache-only write: store the serialized entity directly.
            expires = context._global_cache_timeout(entity.key, options)
            cache_value = entity_pb.SerializeToString()
            yield _cache.global_set(cache_key, cache_value, expires=expires)

    if use_datastore:
        transaction = context.transaction
        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        key_pb = yield batch.put(entity_pb)
        if key_pb:
            key = helpers.key_from_protobuf(key_pb)
        else:
            key = None

        if lock:
            # `lock` is only set on the use_datastore path, so `transaction`
            # is always bound here.
            if transaction:
                # Defer the unlock until the transaction finishes, so readers
                # stay locked out until the write is actually visible.
                def callback():
                    _cache.global_unlock_for_write(cache_key, lock).result()

                context.call_on_transaction_complete(callback)
            else:
                yield _cache.global_unlock_for_write(cache_key, lock)

        raise tasklets.Return(key)
    # Cache-only path: no datastore key to report; tasklet result is None.
def lookup(key, options):
    """Look up a Datastore entity.

    Gets an entity from Datastore, asynchronously. Checks the global cache,
    first, if appropriate. Uses batching.

    Args:
        key (~datastore.Key): The key for the entity to retrieve.
        options (_options.ReadOptions): The options for the request. For
            example, ``{"read_consistency": EVENTUAL}``.

    Returns:
        :class:`~tasklets.Future`: If not an exception, future's result will
            be either an entity protocol buffer or _NOT_FOUND.
    """
    context = context_module.get_context()
    use_datastore = context._use_datastore(key, options)
    # Transactional reads must come straight from Datastore; using the global
    # cache inside a transaction could return stale data.
    if use_datastore and options.transaction:
        use_global_cache = False
    else:
        use_global_cache = context._use_global_cache(key, options)

    if not (use_global_cache or use_datastore):
        raise TypeError(
            "use_global_cache and use_datastore can't both be False")

    entity_pb = _NOT_FOUND
    key_locked = False

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)
        result = yield _cache.global_get(cache_key)
        # A locked value means a write is in flight elsewhere; don't trust or
        # update the cache for this key.
        key_locked = _cache.is_locked_value(result)
        if not key_locked:
            if result is not None:
                # Cache hit: deserialize the stored entity protobuf.
                entity_pb = entity_pb2.Entity()
                entity_pb.MergeFromString(result)
            elif use_datastore:
                # Cache miss: take a read lock and start an optimistic watch
                # so the compare-and-swap below only fills the cache if the
                # value hasn't changed in the meantime.
                yield _cache.global_lock(cache_key, read=True)
                yield _cache.global_watch(cache_key)

    if entity_pb is _NOT_FOUND and use_datastore:
        # Fall back to Datastore via the shared lookup batch.
        batch = _batch.get_batch(_LookupBatch, options)
        entity_pb = yield batch.add(key)

    # Do not cache misses
    if use_global_cache and not key_locked:
        if entity_pb is not _NOT_FOUND:
            expires = context._global_cache_timeout(key, options)
            serialized = entity_pb.SerializeToString()
            # Only writes if the watched value is unchanged (optimistic txn).
            yield _cache.global_compare_and_swap(cache_key, serialized, expires=expires)
        else:
            # Entity not found: end the optimistic transaction without caching.
            yield _cache.global_unwatch(cache_key)

    raise tasklets.Return(entity_pb)
def global_delete(key):
    """Delete an entity from the global cache.

    Args:
        key (bytes): The key to delete.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Deletes are funneled through the shared batch for this cache operation.
    return _batch.get_batch(_GlobalCacheDeleteBatch).add(key)
def global_get(key):
    """Get entity from global cache.

    Args:
        key (bytes): The key to get.

    Returns:
        tasklets.Future: Eventual result will be the entity (``bytes``) or
            ``None``.
    """
    # Reads are funneled through the shared batch for this cache operation.
    return _batch.get_batch(_GlobalCacheGetBatch).add(key)
def allocate(keys, options):
    """Allocate ids for incomplete keys.

    Args:
        keys (list(key.Key)): The incomplete keys.
        options (_options.Options): The options for the request.

    Returns:
        tasklets.Future: A future for the key completed with the allocated id.
    """
    # Delegate to the shared AllocateIds batch for these options.
    return _batch.get_batch(_AllocateIdsBatch, options).add(keys)
def global_watch(key):
    """Start optimistic transaction with global cache.

    A future call to :func:`global_compare_and_swap` will only set the value
    if the value hasn't changed in the cache since the call to this function.

    Args:
        key (bytes): The key to watch.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Watches are funneled through the shared batch for this cache operation.
    return _batch.get_batch(_GlobalCacheWatchBatch).add(key)
def lookup_callback(self, rpc):
    """Process the results of a call to Datastore Lookup.

    Each key in the batch will be in one of `found`, `missing`, or
    `deferred`. `found` keys have their futures' results set with the
    protocol buffers for their entities. `missing` keys have their futures'
    results set with `_NOT_FOUND`, a sentinel value. `deferred` keys are
    loaded into a new batch so they can be tried again.

    Args:
        rpc (tasklets.Future): If not an exception, the result will be an
            instance of
            :class:`google.cloud.datastore_v1.datastore_pb.LookupResponse`
    """
    # A failed RPC fails every future that was waiting on this batch.
    error = rpc.exception()
    if error is not None:
        for future in itertools.chain(*self.todo.values()):
            future.set_exception(error)
        return

    response = rpc.result()
    log.debug(response)

    # Deferred keys are re-batched, carrying their original futures along.
    if response.deferred:
        retry_batch = _batch.get_batch(type(self), self.options)
        for deferred_key in response.deferred:
            serialized = deferred_key.SerializeToString()
            retry_batch.todo.setdefault(serialized, []).extend(
                self.todo[serialized]
            )

    # Missing keys resolve to the _NOT_FOUND sentinel; callers decide how to
    # handle it.
    for missing in response.missing:
        serialized = missing.entity.key.SerializeToString()
        for future in self.todo[serialized]:
            future.set_result(_NOT_FOUND)

    # Found keys resolve to their entity protocol buffers.
    for found in response.found:
        entity = found.entity
        serialized = entity.key.SerializeToString()
        for future in self.todo[serialized]:
            future.set_result(entity)
def global_unwatch(key):
    """End optimistic transaction with global cache.

    Indicates that value for the key wasn't found in the database, so there
    will not be a future call to :func:`global_compare_and_swap`, and we no
    longer need to watch this key.

    Args:
        key (bytes): The key to unwatch.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Unwatches are funneled through the shared batch for this cache
    # operation.
    return _batch.get_batch(_GlobalCacheUnwatchBatch).add(key)
def global_set(key, value, expires=None):
    """Store entity in the global cache.

    Args:
        key (bytes): The key to save.
        value (bytes): The entity to save.
        expires (Optional[float]): Number of seconds until value expires.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Only pass an "expires" option when a (truthy) expiration was given.
    batch_options = {"expires": expires} if expires else {}
    return _batch.get_batch(_GlobalCacheSetBatch, batch_options).add(key, value)
def global_set(key, value, expires=None, read=False):
    """Store entity in the global cache.

    Args:
        key (bytes): The key to save.
        value (bytes): The entity to save.
        expires (Optional[float]): Number of seconds until value expires.
        read (bool): Indicates if being set in a read (lookup) context.
            Accepted for interface compatibility; not used in this function's
            body.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Explicit None check: an expires of 0 still produces an option.
    batch_options = {} if expires is None else {"expires": expires}
    batch = _batch.get_batch(_GlobalCacheSetBatch, batch_options)
    return batch.add(key, value)
def allocate(keys, options):
    """Allocate ids for incomplete keys.

    Args:
        keys (list(key.Key)): The incomplete keys.
        options (_options.Options): The options for the request.

    Returns:
        tasklets.Future: A future for the keys completed with allocated ids.
    """
    futures = []
    remaining = keys
    # Spread the keys across as many batches as needed, respecting each
    # batch's remaining capacity.
    while remaining:
        batch = _batch.get_batch(_AllocateIdsBatch, options)
        capacity = batch.room_left()
        futures.extend(batch.add(remaining[:capacity]))
        remaining = remaining[capacity:]
    return tasklets._MultiFuture(futures)
def global_compare_and_swap(key, value, expires=None):
    """Like :func:`global_set` but using an optimistic transaction.

    Value will only be set for the given key if the value in the cache hasn't
    changed since a preceding call to :func:`global_watch`.

    Args:
        key (bytes): The key to save.
        value (bytes): The entity to save.
        expires (Optional[float]): Number of seconds until value expires.

    Returns:
        tasklets.Future: Eventual result will be ``None``.
    """
    # Only pass an "expires" option when a (truthy) expiration was given.
    batch_options = {"expires": expires} if expires else {}
    batch = _batch.get_batch(_GlobalCacheCompareAndSwapBatch, batch_options)
    return batch.add(key, value)
def test_allocate():
    """allocate() registers its keys and futures with the shared batch."""
    opts = _options.Options()
    result_future = _api.allocate(["one", "two"], opts)

    # get_batch with the same options returns the batch allocate() used.
    shared_batch = _batch.get_batch(_api._AllocateIdsBatch, opts)
    assert shared_batch.keys == ["one", "two"]
    assert shared_batch.futures == result_future._dependencies