Example #1
def _get_commit_batch(transaction, options):
    """Get the commit batch for the current context and transaction.

    Args:
        transaction (bytes): The transaction id. Different transactions will
            have different batches.
        options (_options.Options): Options for the batch. Not supported at
            this time.

    Returns:
        _TransactionalCommitBatch: The batch.
    """
    # Support for different options will be tricky if we're in a transaction,
    # since we can only do one commit, so any options that affect that gRPC
    # call would all need to be identical. For now, no options are supported
    # here.
    for key, value in options.items():
        if value:
            raise NotImplementedError("Passed bad option: {!r}".format(key))

    # Since we're in a transaction, we need to hang on to the batch until
    # commit time, so we need to store it separately from other batches.
    context = context_module.get_context()
    batch = context.commit_batches.get(transaction)
    if batch is None:
        batch = _TransactionalCommitBatch(transaction, options)
        context.commit_batches[transaction] = batch

    return batch
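The helper is a get-or-create lookup keyed by transaction id, so repeated calls during one transaction share a single batch. A minimal standalone sketch of the same pattern (the names here are illustrative, not part of the ndb API):

class FakeContext:
    """Stand-in for the ndb context: one commit batch per transaction id."""

    def __init__(self):
        self.commit_batches = {}


def get_commit_batch(context, transaction):
    # Reuse the batch for this transaction id, creating it on first use.
    batch = context.commit_batches.get(transaction)
    if batch is None:
        batch = context.commit_batches[transaction] = object()
    return batch


context = FakeContext()
assert get_commit_batch(context, b"tx1") is get_commit_batch(context, b"tx1")
assert get_commit_batch(context, b"tx1") is not get_commit_batch(context, b"tx2")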
Example #2
    def tasklet_wrapper(*args, **kwargs):
        # The normal case is that the wrapped function is a generator function
        # that returns a generator when called. We also support the case that
        # the user has wrapped a regular function with the tasklet decorator.
        # In this case, we fail to realize an actual tasklet, but we go ahead
        # and create a future object and set the result to the function's
        # return value so that from the user perspective there is no problem.
        # This permissive behavior is inherited from legacy NDB.
        context = context_module.get_context()

        try:
            returned = wrapped(*args, **kwargs)
        except StopIteration as stop:
            # If wrapped is a regular function and the function uses the
            # deprecated "raise Return(result)" pattern rather than just
            # returning the result, then we'll extract the result from the
            # StopIteration exception.
            returned = _get_return_value(stop)

        if isinstance(returned, types.GeneratorType):
            # We have a tasklet, start it
            future = _TaskletFuture(returned, context, info=wrapped.__name__)
            future._advance_tasklet()

        else:
            # We don't have a tasklet, but we fake it anyway
            future = Future(info=wrapped.__name__)
            future.set_result(returned)

        return future
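Both shapes the comment describes can be exercised through the public ``tasklet`` decorator. A sketch, assuming a client can be constructed with default credentials:

from google.cloud import ndb
from google.cloud.ndb import tasklets

@ndb.tasklet
def generator_style(x):
    # A real tasklet: a generator function that can yield futures.
    yield tasklets.sleep(0)
    return x + 1

@ndb.tasklet
def plain_style(x):
    # Not a generator; the wrapper fakes a future holding the return value.
    return x + 1

client = ndb.Client()
with client.context():
    assert generator_style(1).result() == 2
    assert plain_style(1).result() == 2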
Example #3
def _datastore_commit(mutations, transaction, retries=None, timeout=None):
    """Call Commit on Datastore.

    Args:
        mutations (List[datastore_pb2.Mutation]): The changes to persist to
            Datastore.
        transaction (Union[bytes, NoneType]): The identifier for the
            transaction for this commit, or :data:`None` if no transaction is
            being used.
        retries (int): Number of times to potentially retry the call. If
            :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`.
            If :data:`0` is passed, the call is attempted only once.
        timeout (float): Timeout, in seconds, to pass to gRPC call. If
            :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`.

    Returns:
        tasklets.Tasklet: A future for
            :class:`google.cloud.datastore_v1.datastore_pb2.CommitResponse`
    """
    if transaction is None:
        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
    else:
        mode = datastore_pb2.CommitRequest.TRANSACTIONAL

    client = context_module.get_context().client
    request = datastore_pb2.CommitRequest(
        project_id=client.project,
        mode=mode,
        mutations=mutations,
        transaction=transaction,
    )

    return make_call("Commit", request, retries=retries, timeout=timeout)
Example #4
def _datastore_lookup(keys, read_options, retries=None, timeout=None):
    """Issue a Lookup call to Datastore using gRPC.

    Args:
        keys (Iterable[entity_pb2.Key]): The entity keys to
            look up.
        read_options (Union[datastore_pb2.ReadOptions, NoneType]): Options for
            the request.
        retries (int): Number of times to potentially retry the call. If
            :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`.
            If :data:`0` is passed, the call is attempted only once.
        timeout (float): Timeout, in seconds, to pass to gRPC call. If
            :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`.

    Returns:
        tasklets.Future: Future object for eventual result of lookup.
    """
    client = context_module.get_context().client
    request = datastore_pb2.LookupRequest(
        project_id=client.project,
        keys=list(keys),
        read_options=read_options,
    )

    return make_call("Lookup", request, retries=retries, timeout=timeout)
Example #5
def delete(key, options):
    """Delete an entity from Datastore.

    Deleting an entity that doesn't exist does not result in an error; the
    outcome is the same either way.

    Args:
        key (datastore.Key): The key for the entity to be deleted.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Will be finished when the entity is deleted. The
            result will always be :data:`None`.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(key, options)
    use_datastore = context._use_datastore(key, options)

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)

    if use_datastore:
        if use_global_cache:
            yield _cache.global_lock(cache_key)

        transaction = _get_transaction(options)
        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        yield batch.delete(key)

    if use_global_cache:
        yield _cache.global_delete(cache_key)
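Note that ``delete`` is itself a generator (it uses ``yield``), so in the library it is wrapped as a tasklet. Application code normally reaches it through a key; a sketch using the public surface, assuming default credentials:

from google.cloud import ndb

class Greeting(ndb.Model):
    message = ndb.StringProperty()

client = ndb.Client()
with client.context():
    key = Greeting(message="hi").put()
    future = key.delete_async()   # resolves via the tasklet above
    assert future.result() is None
    assert key.get() is None      # deleting again would also be a no-op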
Example #6
    def test_success(_datastore_api):
        context_module.get_context().cache["foo"] = "bar"
        on_commit_callback = mock.Mock()

        def callback():
            context = context_module.get_context()
            assert not context.cache
            context.call_on_commit(on_commit_callback)
            return "I tried, momma."

        begin_future = tasklets.Future("begin transaction")
        _datastore_api.begin_transaction.return_value = begin_future

        commit_future = tasklets.Future("commit transaction")
        _datastore_api.commit.return_value = commit_future

        future = _transaction.transaction_async(callback)

        _datastore_api.begin_transaction.assert_called_once_with(
            False, retries=0
        )
        begin_future.set_result(b"tx123")

        _datastore_api.commit.assert_called_once_with(b"tx123", retries=0)
        commit_future.set_result(None)

        assert future.result() == "I tried, momma."
        on_commit_callback.assert_called_once_with()
Example #7
def _datastore_begin_transaction(read_only, retries=None, timeout=None):
    """Calls ``BeginTransaction`` on Datastore.

    Args:
        read_only (bool): Whether to start a read-only or read-write
            transaction.
        retries (int): Number of times to potentially retry the call. If
            :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`.
            If :data:`0` is passed, the call is attempted only once.
        timeout (float): Timeout, in seconds, to pass to gRPC call. If
            :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`.

    Returns:
        tasklets.Tasklet: A future for
            :class:`google.cloud.datastore_v1.datastore_pb2.BeginTransactionResponse`
    """
    client = context_module.get_context().client
    if read_only:
        options = datastore_pb2.TransactionOptions(
            read_only=datastore_pb2.TransactionOptions.ReadOnly())
    else:
        options = datastore_pb2.TransactionOptions(
            read_write=datastore_pb2.TransactionOptions.ReadWrite())

    request = datastore_pb2.BeginTransactionRequest(
        project_id=client.project, transaction_options=options)

    return make_call("BeginTransaction",
                     request,
                     retries=retries,
                     timeout=timeout)
Example #8
    def test__context():
        with patch_credentials("testing"):
            client = client_module.Client()

        with client.context():
            context = context_module.get_context()
            assert context.client is client
Example #9
def put(entity, options):
    """Store an entity in datastore.

    The entity can be a new entity to be saved for the first time or an
    existing entity that has been updated.

    Args:
        entity (datastore.Entity): The entity to be stored.
        options (_options.Options): Options for this request.

    Returns:
        tasklets.Future: Result will be completed datastore key
            (datastore.Key) for the entity.
    """
    context = context_module.get_context()
    use_global_cache = context._use_global_cache(entity.key, options)
    use_datastore = context._use_datastore(entity.key, options)
    if not (use_global_cache or use_datastore):
        raise TypeError("use_global_cache and use_datastore can't both be False")

    if not use_datastore and entity.key.is_partial:
        raise TypeError("Can't store partial keys when use_datastore is False")

    lock = None
    entity_pb = helpers.entity_to_protobuf(entity)
    cache_key = _cache.global_cache_key(entity.key)
    if use_global_cache and not entity.key.is_partial:
        if use_datastore:
            lock = yield _cache.global_lock_for_write(cache_key)
        else:
            expires = context._global_cache_timeout(entity.key, options)
            cache_value = entity_pb.SerializeToString()
            yield _cache.global_set(cache_key, cache_value, expires=expires)

    if use_datastore:
        transaction = context.transaction
        if transaction:
            batch = _get_commit_batch(transaction, options)
        else:
            batch = _batch.get_batch(_NonTransactionalCommitBatch, options)

        key_pb = yield batch.put(entity_pb)
        if key_pb:
            key = helpers.key_from_protobuf(key_pb)
        else:
            key = None

        if lock:
            if transaction:

                def callback():
                    _cache.global_unlock_for_write(cache_key, lock).result()

                context.call_on_transaction_complete(callback)

            else:
                yield _cache.global_unlock_for_write(cache_key, lock)

        raise tasklets.Return(key)
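The write-lock bookkeeping only kicks in when a global cache is configured; otherwise the function reduces to a plain batch commit. A sketch of the public entry point, assuming a Redis cache is reachable via the REDIS_CACHE_URL environment variable:

from google.cloud import ndb

class Account(ndb.Model):
    balance = ndb.IntegerProperty()

client = ndb.Client()
global_cache = ndb.RedisCache.from_environment()
with client.context(global_cache=global_cache):
    # put() runs the lock/write/unlock dance above; without a global
    # cache it would be a straight Datastore commit.
    key = Account(balance=100).put()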
Example #10
def in_transaction():
    """Determine if there is a currently active transaction.

    Returns:
        bool: :data:`True` if there is a transaction for the current context,
            otherwise :data:`False`.
    """
    return context_module.get_context().transaction is not None
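For instance, via the public wrappers (a sketch, assuming default credentials):

from google.cloud import ndb

client = ndb.Client()
with client.context():
    assert not ndb.in_transaction()

    def txn():
        assert ndb.in_transaction()
        return "done"

    assert ndb.transaction(txn) == "done"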
Example #11
def lookup(key, options):
    """Look up a Datastore entity.

    Gets an entity from Datastore asynchronously, checking the global cache
    first when appropriate. Uses batching.

    Args:
        key (~datastore.Key): The key for the entity to retrieve.
        options (_options.ReadOptions): The options for the request. For
            example, ``{"read_consistency": EVENTUAL}``.

    Returns:
        :class:`~tasklets.Future`: If not an exception, the future's result
            will be either an entity protocol buffer or :data:`_NOT_FOUND`.
    """
    context = context_module.get_context()
    use_datastore = context._use_datastore(key, options)
    if use_datastore and options.transaction:
        use_global_cache = False
    else:
        use_global_cache = context._use_global_cache(key, options)

    if not (use_global_cache or use_datastore):
        raise TypeError(
            "use_global_cache and use_datastore can't both be False")

    entity_pb = _NOT_FOUND
    key_locked = False

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)
        result = yield _cache.global_get(cache_key)
        key_locked = _cache.is_locked_value(result)
        if not key_locked:
            if result is not None:
                entity_pb = entity_pb2.Entity()
                entity_pb.MergeFromString(result)

            elif use_datastore:
                yield _cache.global_lock(cache_key, read=True)
                yield _cache.global_watch(cache_key)

    if entity_pb is _NOT_FOUND and use_datastore:
        batch = _batch.get_batch(_LookupBatch, options)
        entity_pb = yield batch.add(key)

        # Do not cache misses
        if use_global_cache and not key_locked:
            if entity_pb is not _NOT_FOUND:
                expires = context._global_cache_timeout(key, options)
                serialized = entity_pb.SerializeToString()
                yield _cache.global_compare_and_swap(cache_key,
                                                     serialized,
                                                     expires=expires)
            else:
                yield _cache.global_unwatch(cache_key)

    raise tasklets.Return(entity_pb)
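The cache choreography above is a read-through protocol: lock on a miss, watch the key, then compare-and-swap so a concurrent write can't strand a stale value. Stripped of the ndb specifics, the shape is roughly this (an illustrative interface, not the actual _cache API):

def read_through(cache, key, load_from_db):
    # Illustrative only: the lock/watch/compare-and-swap shape used above.
    value = cache.get(key)
    if cache.is_locked_value(value):
        return load_from_db(key)            # a writer holds the key: bypass cache
    if value is not None:
        return value                        # plain cache hit
    cache.lock_for_read(key)                # mark "load in progress"
    cache.watch(key)                        # arm the optimistic check
    value = load_from_db(key)
    if value is not None:
        cache.compare_and_swap(key, value)  # store only if unchanged since watch
    else:
        cache.unwatch(key)                  # never cache a miss
    return value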
Example #12
    def _advance_tasklet(self, send_value=None, error=None):
        """Advance a tasklet one step by sending in a value or error."""
        try:
            with self.context.use():
                # Send the next value or exception into the generator
                if error:
                    self.generator.throw(type(error), error)

                # send_value will be None if this is the first time
                yielded = self.generator.send(send_value)

                # Context may have changed in tasklet
                self.context = context_module.get_context()

        except StopIteration as stop:
            # Generator has signalled exit, get the return value. This tasklet
            # has finished.
            self.set_result(_get_return_value(stop))
            return

        except Exception as error:
            # An error has occurred in the tasklet. This tasklet has finished.
            self.set_exception(error)
            return

        # This tasklet has yielded a value. We expect this to be a future
        # object (either NDB or gRPC) or a sequence of futures, in the case of
        # parallel yield.

        def done_callback(yielded):
            # To be called when a future dependency has completed.  Advance the
            # tasklet with the yielded value or error.
            #
            # It might be worth noting that legacy NDB added a callback to the
            # event loop which, in turn, called _help_tasklet_along. I don't
            # see a compelling reason not to go ahead and call _advance_tasklet
            # immediately here, rather than queue it up to be called soon by
            # the event loop. This is subject to change if the reason for the
            # indirection in the original implementation becomes apparent.
            error = yielded.exception()
            if error:
                self._advance_tasklet(error=error)
            else:
                self._advance_tasklet(yielded.result())

        if isinstance(yielded, Future):
            yielded.add_done_callback(done_callback)

        elif isinstance(yielded, _remote.RemoteCall):
            _eventloop.queue_rpc(yielded, done_callback)

        elif isinstance(yielded, (list, tuple)):
            future = _MultiFuture(yielded)
            future.add_done_callback(done_callback)

        else:
            raise RuntimeError(
                "A tasklet yielded an illegal value: {!r}".format(yielded))
Example #13
    def wrap_endpoint(*args, **kwargs):
        """Wraps the endpoint method in an NDB context."""
        context = context_module.get_context(raise_context_error=False)
        if not context:
            creds, project = auth.default()
            with ndb.Client(project=project, credentials=creds).context():
                return method(*args, **kwargs)
        # If the endpoint is already inside an NDB context, don't create a
        # new one.
        return method(*args, **kwargs)
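wrap_endpoint is the inner function of a decorator. A hedged sketch of the enclosing decorator it implies (the name with_ndb_context is assumed; functools.wraps added for hygiene):

import functools

from google import auth
from google.cloud import ndb
from google.cloud.ndb import context as context_module

def with_ndb_context(method):
    """Hypothetical enclosing decorator for the wrap_endpoint shown above."""

    @functools.wraps(method)
    def wrap_endpoint(*args, **kwargs):
        context = context_module.get_context(raise_context_error=False)
        if not context:
            creds, project = auth.default()
            with ndb.Client(project=project, credentials=creds).context():
                return method(*args, **kwargs)
        return method(*args, **kwargs)

    return wrap_endpoint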
Example #14
        def callback():
            # The transaction uses its own in-memory cache, which should be empty in
            # the transaction context and not include the key set above.
            context = context_module.get_context()
            assert not context.cache

            context.call_on_commit(on_commit_callback)
            context.call_on_transaction_complete(transaction_complete_callback)
            return "I tried, momma."
Example #15
def fetch(query):
    """Fetch query results.

    Args:
        query (query.QueryOptions): The query spec.

    Returns:
        tasklets.Future: Result is List[model.Model]: The query results.
    """
    client = context_module.get_context().client

    project_id = query.project
    if not project_id:
        project_id = client.project

    namespace = query.namespace
    if not namespace:
        namespace = client.namespace

    filter_pbs = (None, )
    if query.filters:
        filter_pbs = query.filters._to_filter()
        if not isinstance(filter_pbs, (tuple, list)):
            filter_pbs = (filter_pbs, )

    multiple_queries = len(filter_pbs) > 1

    if multiple_queries:
        # If we're aggregating multiple queries, then limit and offset will
        # have to be applied to the aggregate, not passed to Datastore to use
        # on individual queries.
        offset = query.offset
        limit = query.limit
        query = query.copy(offset=0, limit=None)
    else:
        offset = limit = None

    queries = [
        _run_query(project_id, namespace, _query_to_protobuf(query, filter_pb))
        for filter_pb in filter_pbs
    ]
    result_sets = yield queries
    result_sets = [(_Result(result_type, result_pb, query.order_by)
                    for result_type, result_pb in result_set)
                   for result_set in result_sets]

    if len(result_sets) > 1:
        sortable = bool(query.order_by)
        results = _merge_results(result_sets, sortable)
    else:
        results = result_sets[0]

    if offset or limit:
        # islice needs concrete bounds; treat a missing offset as 0 and a
        # missing limit as "no upper bound" so a lone offset can't raise a
        # TypeError on `offset + limit`.
        start = offset or 0
        stop = None if limit is None else start + limit
        results = itertools.islice(results, start, stop)

    return [result.entity(query.projection) for result in results]
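The offset/limit slice over the merged iterator is ordinary itertools.islice arithmetic; for example:

import itertools

merged = iter(range(10))          # stand-in for the merged result iterator
offset, limit = 3, 4
page = list(itertools.islice(merged, offset, offset + limit))
assert page == [3, 4, 5, 6]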
Example #16
    def _advance_tasklet(self, send_value=None, error=None):
        """Advance a tasklet one step by sending in a value or error."""
        try:
            with self.context.use():
                # Send the next value or exception into the generator
                if error:
                    self.generator.throw(type(error), error)

                # send_value will be None if this is the first time
                yielded = self.generator.send(send_value)

                # Context may have changed in tasklet
                self.context = context_module.get_context()

        except StopIteration as stop:
            # Generator has signalled exit, get the return value. This tasklet
            # has finished.
            self.set_result(_get_return_value(stop))
            return

        except Exception as error:
            # An error has occurred in the tasklet. This tasklet has finished.
            self.set_exception(error)
            return

        # This tasklet has yielded a value. We expect this to be a future
        # object (either NDB or gRPC) or a sequence of futures, in the case of
        # parallel yield.

        def done_callback(yielded):
            # To be called when a future dependency has completed.  Advance the
            # tasklet with the yielded value or error.
            #
            # It was tempting to call `_advance_tasklet` (`_help_tasklet_along`
            # in Legacy) directly. Doing so, it has been found, can lead to
            # exceeding the maximum recursion depth. Queuing it up to run on the
            # event loop avoids this issue by keeping the call stack shallow.
            error = yielded.exception()
            if error:
                _eventloop.call_soon(self._advance_tasklet, error=error)
            else:
                _eventloop.call_soon(self._advance_tasklet, yielded.result())

        if isinstance(yielded, Future):
            yielded.add_done_callback(done_callback)

        elif isinstance(yielded, _remote.RemoteCall):
            _eventloop.queue_rpc(yielded, done_callback)

        elif isinstance(yielded, (list, tuple)):
            future = _MultiFuture(yielded)
            future.add_done_callback(done_callback)

        else:
            raise RuntimeError(
                "A tasklet yielded an illegal value: {!r}".format(yielded))
Example #17
def _key_function(values):
    if not len(values) % 2:
        context = context_module.get_context()
        client = context.client
        return key.Key(*values,
                       namespace=context.get_namespace(),
                       project=client.project)
    _raise_cast_error(
        "Key requires even number of operands or single string, {}".format(
            values))
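An even-length operand list maps onto the (kind, id) pairs of a complete key. A sketch with the public key class, assuming an active context:

from google.cloud import ndb

client = ndb.Client()
with client.context():
    key = ndb.Key("ThisKind", 42)      # the shape _key_function builds
    assert key.kind() == "ThisKind"
    assert key.id() == 42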
Example #18
def stub():
    """Get the stub for the `Google Datastore` API.

    Gets the stub from the current context.

    Returns:
        :class:`~google.cloud.datastore_v1.proto.datastore_pb2_grpc.DatastoreStub`:
            The stub instance.
    """
    context = context_module.get_context()
    return context.stub
Example #19
def get_event_loop():
    """Get the current event loop.

    This function should be called within a context established by
    :func:`~google.cloud.ndb.ndb_context`.

    Returns:
        EventLoop: The event loop for the current context.
    """
    context = context_module.get_context()
    return context.eventloop
Example #20
def in_transaction():
    """Determine if there is a currently active transaction.

    Returns:
        bool: :data:`True` if there is a transaction for the current context,
            otherwise :data:`False`.
    """
    # Avoid circular import in Python 2.7
    from google.cloud.ndb import context as context_module

    return context_module.get_context().transaction is not None
Example #21
    def _handle_independent(self):
        """Always use a new transaction, "pausing" any existing transactions.
        A function that uses this policy should not return any entities read in
        the new transaction, as the entities are not transactionally consistent
        with the caller's transaction.
        """
        if in_transaction():
            # Avoid circular import in Python 2.7
            from google.cloud.ndb import context as context_module

            context = context_module.get_context()
            new_context = context.new(transaction=None)
            return new_context
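"Pausing" here is nothing more than deriving a context with the transaction cleared; the caller is expected to enter it. A sketch using the same context API shown in these examples:

context = context_module.get_context()
with context.new(transaction=None).use():
    # Work here runs outside the caller's transaction.
    assert context_module.get_context().transaction is None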
Example #22
    def retry_wrapper(*args, **kwargs):
        from google.cloud.ndb import context as context_module

        sleep_generator = core_retry.exponential_sleep_generator(
            _DEFAULT_INITIAL_DELAY,
            _DEFAULT_MAXIMUM_DELAY,
            _DEFAULT_DELAY_MULTIPLIER,
        )

        for sleep_time in itertools.islice(sleep_generator, retries + 1):
            context = context_module.get_context()
            if not context.in_retry():
                # We need to be able to identify if we are inside a nested
                # retry. Here, we set the retry state in the context. This is
                # used for deciding if an exception should be raised
                # immediately or passed up to the outer retry block.
                context.set_retry_state(repr(callback))
            try:
                result = callback(*args, **kwargs)
                if isinstance(result, tasklets.Future):
                    result = yield result
            except exceptions.NestedRetryException as e:
                error = e
            except Exception as e:
                # `e` is removed from locals at end of block
                error = e  # See: https://goo.gl/5J8BMK
                if not is_transient_error(error):
                    # If we are in an inner retry block, use special nested
                    # retry exception to bubble up to outer retry. Else, raise
                    # actual exception.
                    if context.get_retry_state() != repr(callback):
                        message = getattr(error, "message", str(error))
                        raise exceptions.NestedRetryException(message)
                    else:
                        raise error
            else:
                raise tasklets.Return(result)
            finally:
                # No matter what, if we are exiting the top level retry,
                # clear the retry state in the context.
                if context.get_retry_state() == repr(
                        callback):  # pragma: NO BRANCH
                    context.clear_retry_state()

            yield tasklets.sleep(sleep_time)

        raise core_exceptions.RetryError(
            "Maximum number of {} retries exceeded while calling {}".format(
                retries, callback),
            cause=error,
        )
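The sleep schedule comes from api_core's exponential_sleep_generator, which yields delays that grow by the multiplier up to the maximum. For instance:

import itertools

from google.api_core.retry import exponential_sleep_generator

# With initial=0.1, multiplier=2 and maximum=60, the nominal delays are
# 0.1, 0.2, 0.4, 0.8, ... capped at 60 (recent api_core versions add jitter).
delays = exponential_sleep_generator(0.1, 60.0, multiplier=2.0)
for delay in itertools.islice(delays, 5):
    print(delay)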
Example #23
        def non_transactional_inner_wrapper(*args, **kwargs):
            # Avoid circular import in Python 2.7
            from google.cloud.ndb import context as context_module

            context = context_module.get_context()
            if not context.in_transaction():
                return wrapped(*args, **kwargs)
            if not allow_existing:
                raise exceptions.BadRequestError(
                    "{} cannot be called within a transaction".format(
                        wrapped.__name__))
            new_context = context.new(transaction=None)
            with new_context.use():
                return wrapped(*args, **kwargs)
Example #24
def get_event_loop():
    """Get the current event loop.

    This function should be called within a context established by
    :func:`~google.cloud.ndb.ndb_context`.

    Returns:
        EventLoop: The event loop for the current context.
    """
    # Prevent circular import in Python 2.7
    from google.cloud.ndb import context as context_module

    context = context_module.get_context()
    return context.eventloop
Example #25
    def _next_batch(self):
        """Get the next batch from Datastore.

        If this batch isn't the last batch for the query, update the internal
        query spec with a cursor pointing to the next batch.
        """
        query = self._query
        response = yield _datastore_run_query(query)

        batch = response.batch
        result_type = batch.entity_result_type

        self._start_cursor = query.start_cursor
        self._index = 0
        self._batch = [
            _Result(result_type, result_pb, query.order_by)
            for result_pb in response.batch.entity_results
        ]

        if result_type == RESULT_TYPE_FULL:
            # If we cached a delete, remove it from the result set. This may
            # cause some queries to return fewer results than their limit even
            # if there are more results. As far as I can tell, that was also a
            # possibility with the legacy version.
            context = context_module.get_context()
            self._batch = [
                result
                for result in self._batch
                if result.check_cache(context) is not None
            ]

        self._has_next_batch = more_results = batch.more_results == NOT_FINISHED

        self._more_results_after_limit = batch.more_results == MORE_RESULTS_AFTER_LIMIT

        if more_results:
            # Fix up query for next batch
            limit = self._query.limit
            if limit is not None:
                limit -= len(self._batch)

            offset = self._query.offset
            if offset:
                offset -= response.batch.skipped_results

            self._query = self._query.copy(
                start_cursor=Cursor(batch.end_cursor),
                offset=offset,
                limit=limit,
            )
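The fix-up is simple budget arithmetic: subtract what this batch consumed before requesting the next one. For example:

# Budget carry-over for the next batch request:
limit, offset = 100, 10           # caller asked for 100 rows, skipping 10
batch_len, skipped = 60, 10       # this batch returned 60 and skipped 10
next_limit = limit - batch_len    # 40 results still wanted
next_offset = offset - skipped    # nothing left to skip
assert (next_limit, next_offset) == (40, 0)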
Example #26
def _get_transaction(options):
    """Get the transaction for a request.

    If specified, this will return the transaction from ``options``. Otherwise,
    it will return the transaction for the current context.

    Args:
        options (Dict[str, Any]): The options for the request. Only
            ``transaction`` will have any bearing here.

    Returns:
        Union[bytes, NoneType]: The transaction identifier, or :data:`None`.
    """
    context = context_module.get_context()
    return options.get("transaction", context.transaction)
Example #27
    def test_entity_full_entity_no_cache(model):
        context = context_module.get_context()
        with context.new(cache_policy=False).use():
            key_pb = entity_pb2.Key(
                partition_id=entity_pb2.PartitionId(project_id="testing"),
                path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)],
            )
            entity = mock.Mock(key=key_pb)
            model._entity_from_protobuf.return_value = entity
            result = _datastore_query._Result(
                _datastore_query.RESULT_TYPE_FULL,
                mock.Mock(entity=entity,
                          cursor=b"123",
                          spec=("entity", "cursor")),
            )
            assert result.entity() is entity
Example #28
def get_batch(batch_cls, options=None):
    """Gets a data structure for storing batched calls to Datastore Lookup.

    The batch data structure is stored in the current context. If there is
    not already a batch started, a new structure is created and an idle
    callback is added to the current event loop which will eventually perform
    the batch look up.

    Args:
        batch_cls (type): Class representing the kind of operation being
            batched.
        options (_options.ReadOptions): The options for the request. Calls with
            different options will be placed in different batches.

    Returns:
        batch_cls: An instance of the batch class.
    """
    # prevent circular import in Python 2.7
    from google.cloud.ndb import context as context_module

    context = context_module.get_context()
    batches = context.batches.get(batch_cls)
    if batches is None:
        context.batches[batch_cls] = batches = {}

    if options is not None:
        options_key = tuple(
            sorted(((key, value) for key, value in options.items()
                    if value is not None)))
    else:
        options_key = ()

    batch = batches.get(options_key)
    if batch is not None and not batch.full():
        return batch

    def idler(batch):
        def idle():
            if batches.get(options_key) is batch:
                del batches[options_key]
            batch.idle_callback()

        return idle

    batches[options_key] = batch = batch_cls(options)
    _eventloop.add_idle(idler(batch))
    return batch
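The options are reduced to a hashable, order-independent key so that equivalent options always land in the same batch. The normalization in isolation (a standalone sketch over a plain dict, not the _options.Options type):

def options_key(options):
    # Standalone version of the normalization above: drop unset values and
    # sort, so insertion order can't split one logical batch into two.
    if options is None:
        return ()
    return tuple(sorted(
        (key, value)
        for key, value in options.items()
        if value is not None
    ))

assert options_key({"timeout": 5, "retries": None}) == (("timeout", 5),)
assert options_key(None) == ()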
Example #29
def _datastore_rollback(transaction, retries=None):
    """Calls Rollback in Datastore.

    Args:
        transaction (bytes): Transaction id.
        retries (int): Number of times to potentially retry the call. If
            :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`.
            If :data:`0` is passed, the call is attempted only once.

    Returns:
        tasklets.Tasklet: Future for
            :class:`google.cloud.datastore_v1.datastore_pb2.RollbackResponse`
    """
    client = context_module.get_context().client
    request = datastore_pb2.RollbackRequest(project_id=client.project,
                                            transaction=transaction)

    return make_call("Rollback", request, retries=retries)
Example #30
    def test_entity_full_entity_cached(model):
        key = key_module.Key("ThisKind", 42)
        key_pb = entity_pb2.Key(
            partition_id=entity_pb2.PartitionId(project_id="testing"),
            path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)],
        )
        entity = mock.Mock(key=key_pb)
        cached_entity = mock.Mock(key=key_pb, _key=key)
        context = context_module.get_context()
        context.cache[key] = cached_entity
        model._entity_from_protobuf.return_value = entity
        result = _datastore_query._Result(
            _datastore_query.RESULT_TYPE_FULL,
            mock.Mock(entity=entity, cursor=b"123", spec=("entity", "cursor")),
        )

        assert result.entity() is not entity
        assert result.entity() is cached_entity