Example #1
File: db.py  Project: Xeon2003/server
def GetAsync(keys, **kwargs):
    """
    Asynchronously retrieves one or more entities from the data store.

    This function is identical to :func:`server.db.Get`, except that it
    returns an asynchronous object. Call ``get_result()`` on the return value to
    block on the call and get the results.
    """
    class AsyncResultWrapper:
        """
        Wraps a result that's already there into something looking
        like an RPC object.
        """
        def __init__(self, res):
            self.res = res

        def get_result(self):
            return self.res

    if conf["viur.db.caching"] > 0 and not datastore.IsInTransaction():
        if isinstance(keys, datastore_types.Key) or isinstance(keys, basestring):  # Just one key
            res = memcache.get(str(keys), namespace=__CacheKeyPrefix__)
            if res:
                return AsyncResultWrapper(res)
    # Either the result wasn't found in the cache, or we got a list of keys to fetch;
    # --> no caching possible
    return datastore.GetAsync(keys, **kwargs)
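A minimal usage sketch of the wrapper above, assuming the module-level conf, cache prefix and App Engine imports from db.py are in place; the "Person" kind and id are hypothetical:

# Hypothetical usage of the caching-aware GetAsync wrapper shown above.
key = datastore_types.Key.from_path("Person", 42)  # example kind/id, not from the source
rpc = GetAsync(key)
# ... do other work while the fetch (or the cache hit) is outstanding ...
entity = rpc.get_result()  # blocks; returns the cached value or the fetched Entity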
Example #2
def bench(response, model_class, keys):
    if issubclass(model_class, db.Model):
        get_func = db.get
    else:
        get_func = ndb_get_multi_nocache

    # Benchmark the model-level fetch (db.get or ndb get_multi, cache bypassed).
    for i in xrange(ITERATIONS):
        total = 0
        start = time.time()
        entities = get_func(keys)
        end = time.time()
        output(
            response, '  %s.get %d entities in %f seconds (total: %d)' %
            (model_class.__name__, len(entities), (end - start), total))

    # datastore.GetAsync expects old-style db.Key objects, so convert ndb keys.
    if issubclass(model_class, ndb.Model):
        keys = [k.to_old_key() for k in keys]

    # Benchmark the raw datastore.GetAsync RPC.
    for i in xrange(ITERATIONS):
        start = time.time()
        rpc = datastore.GetAsync(keys)
        entities = rpc.get_result()
        end = time.time()

        if DUMP_ENTITIES and i == 0:
            response.write('\n')
            response.write('## ENTITY CONTENTS:\n')
            response.write(repr(entities))
            response.write('\n\n')

            for i, entity in enumerate(entities):
                response.write('  %d: %d keys %d serialized bytes\n' %
                               (i, len(entity), entity.ToPb().ByteSize()))

        output(
            response,
            '  datastore.GetAsync %d entities in %f seconds (total %d)' %
            (len(entities), (end - start), total))

    # Benchmark datastore_lazy.get, which skips most of the deserialization.
    for i in xrange(ITERATIONS):
        start = time.time()
        entities = datastore_lazy.get(keys)
        end = time.time()

        output(
            response,
            '  datastore_lazy.get %d entities in %f seconds (total %d)' %
            (len(entities), (end - start), total))
def get(keys):
    """Get LazyEntities for each datastore object corresponding to the keys in keys. keys must be
    a list of db.Key objects. Deserializing datastore objects with many properties is very slow
    (~10 ms for an entity with 170 properties). google.appengine.api.datastore.GetAsync avoids
    some of the deserialization, but not all of it. This monkey-patches a private undocumented API
    to avoid nearly all of it.

    How Datastore deserialization normally works:
    * The datastore returns a blob of bytes.
    * The datastore result is parsed into a protocol buffer object: entity_pb.EntityProto. This
      probably happens in native C/C++ code in the App Engine standard environment; see comments:
      https://github.com/GoogleCloudPlatform/gcloud-python/issues/298
    * The entity_pb.EntityProto is converted into a datastore.Entity.
    * The datastore.Entity is converted into the appropriate db.Model subclass.

    This bypasses a lot of parsing by returning the EntityProto wrapped in a LazyEntity. It's likely
    to be quite a bit faster in many cases.

    If this breaks, it probably means the internal API has changed."""

    # db.get calls db.get_async calls datastore.GetAsync
    # datastore.GetAsync then calls _GetConnection(), then Connection.async_get
    # _GetConnection returns a thread-local so it should be safe to hack it in this way
    # datastore_rpc.BaseConnection uses self.__adapter.pb_to_entity to convert the entity
    # protocol buffer into an Entity: skip that step and return a LazyEntity instead
    connection = datastore._GetConnection()
    if connection._api_version != datastore_rpc._DATASTORE_V3:
        raise Exception("Unsupported API version: " + connection._api_version)
    # patch the connection because it is thread-local. Previously we patched adapter.pb_to_entity
    # which is shared. This caused exceptions in other threads under load. Oops.
    real_adapter = connection._BaseConnection__adapter
    wrapped_adapter = DatastoreLazyEntityAdapter(real_adapter)
    connection._BaseConnection__adapter = wrapped_adapter
    try:
        rpc = datastore.GetAsync(keys)
        return rpc.get_result()
    finally:
        connection._BaseConnection__adapter = real_adapter
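DatastoreLazyEntityAdapter and LazyEntity are referenced above but not shown in this excerpt. A minimal sketch of what they might look like, assuming the adapter only needs to intercept pb_to_entity and delegate everything else to the real datastore_rpc adapter:

class LazyEntity(object):
    # Hypothetical sketch: holds the raw entity_pb.EntityProto and defers the
    # expensive conversion to a datastore.Entity until explicitly requested.
    def __init__(self, entity_proto):
        self._pb = entity_proto

    def to_entity(self):
        # Full deserialization, performed lazily and only on demand.
        return datastore.Entity.FromPb(self._pb)


class DatastoreLazyEntityAdapter(object):
    # Hypothetical sketch: wraps the real adapter and skips the pb -> Entity
    # parsing step, handing back a LazyEntity instead.
    def __init__(self, adapter):
        self._adapter = adapter

    def pb_to_entity(self, pb):
        return LazyEntity(pb)

    def __getattr__(self, name):
        # Delegate everything else (pb_to_key, entity_to_pb, ...) unchanged.
        return getattr(self._adapter, name)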
Example #4
    def get_async(cls, keys):
        # Accepts either a single key or a list of keys;
        # NormalizeAndTypeCheckKeys returns (key_list, multiple_flag).
        keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)

        rpc = datastore.GetAsync(keys)
        results = rpc.get_result()
        return results if multiple else results[0]
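A brief usage note for the classmethod above (SomeModel and the keys are hypothetical): because NormalizeAndTypeCheckKeys reports whether the input was plural, a single key yields a single entity while a list of keys yields a list.

# Hypothetical usage: single key -> single entity, list of keys -> list of entities.
entity = SomeModel.get_async(some_key)
entities = SomeModel.get_async([key_a, key_b])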