def get_from_cache_by_key(key):
    """
        Given a datastore.Key (which should already have the namespace applied to it),
        return an entity from the context cache, falling back to memcache when possible.
    """
    if not CACHE_ENABLED:
        return None

    context = get_context()
    in_txn = datastore.IsInTransaction()
    namespace = key.namespace() or None

    if not context.context_enabled:
        # Context cache disabled - memcache is the only option, and only
        # outside a transaction
        if context.memcache_enabled and not in_txn:
            return _get_entity_from_memcache_by_key(key)
        return None

    # Safe to hit the context cache: a fresh one was pushed onto the stack
    # when the transaction started
    entity = context.stack.top.get_entity_by_key(key)
    if entity is not None or in_txn or not context.memcache_enabled:
        return entity

    entity = _get_entity_from_memcache_by_key(key)
    if entity:
        # Memcache hit - repopulate the context cache with it
        add_entities_to_cache(
            utils.get_model_from_db_table(entity.key().kind()),
            [entity],
            CachingSituation.DATASTORE_GET,
            namespace,
            skip_memcache=True,  # Don't put in memcache, we just got it from there!
        )
    return entity
def get_from_cache(unique_identifier, namespace):
    """
        Return an entity from the context cache, falling back to memcache when possible
    """
    context = get_context()

    if not CACHE_ENABLED:
        return None

    cache_key = _apply_namespace(unique_identifier, namespace)
    in_txn = datastore.IsInTransaction()

    if not context.context_enabled:
        # No context cache - only memcache remains, and only outside a transaction
        if context.memcache_enabled and not in_txn:
            return _get_entity_from_memcache(cache_key)
        return None

    # Safe to hit the context cache: a fresh one was pushed onto the stack
    # when the transaction started
    entity = context.stack.top.get_entity(cache_key)
    if entity is not None or in_txn or not context.memcache_enabled:
        return entity

    entity = _get_entity_from_memcache(cache_key)
    if entity:
        # Memcache hit - repopulate the context cache with it
        add_entities_to_cache(
            utils.get_model_from_db_table(entity.key().kind()),
            [entity],
            CachingSituation.DATASTORE_GET,
            namespace,
            skip_memcache=True,  # Don't put in memcache, we just got it from there!
        )
    return entity
def add_entity_to_cache(model, entity, situation, skip_memcache=False):
    """
        Cache a single entity in the context cache and, when allowed, memcache.

        :param model: The model class the entity belongs to
        :param entity: The datastore entity to cache
        :param situation: A ``CachingSituation`` value describing why we are caching
        :param skip_memcache: If True, never write to memcache. Added for
            consistency with ``add_entities_to_cache``; defaults to False so
            existing callers are unaffected.
    """
    ensure_context()

    identifiers = unique_identifiers_from_entity(model, entity)

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and datastore.IsInTransaction():
        return

    if situation in (CachingSituation.DATASTORE_PUT, CachingSituation.DATASTORE_GET_PUT) and \
            datastore.IsInTransaction():
        # We have to wipe the entity from memcache
        if entity.key():
            _remove_entity_from_memcache_by_key(entity.key())

    _context.stack.top.cache_entity(identifiers, entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or PUT (outside a transaction)
    # the exception is GET_PUT - which we do in our own transaction so we have to ignore that!
    if (not datastore.IsInTransaction() and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT)) or \
            situation == CachingSituation.DATASTORE_GET_PUT:
        if not skip_memcache:
            _add_entity_to_memcache(model, entity, identifiers)
def add_entities_to_cache(model, entities, situation, skip_memcache=False):
    """
        Cache several entities of *model* in the context cache and, when
        allowed, memcache.
    """
    ensure_context()

    in_txn = datastore.IsInTransaction()

    # Don't cache on Get if we are inside a transaction, even in the context
    # This is because transactions don't see the current state of the datastore
    # We can still cache in the context on Put() but not in memcache
    if situation == CachingSituation.DATASTORE_GET and in_txn:
        return

    if in_txn and situation in (CachingSituation.DATASTORE_PUT, CachingSituation.DATASTORE_GET_PUT):
        # We have to wipe the entities from memcache
        stale_keys = [entity.key() for entity in entities if entity.key()]
        _remove_entities_from_memcache_by_key(stale_keys)

    identifiers = [unique_identifiers_from_entity(model, entity) for entity in entities]

    for ent_identifiers, entity in zip(identifiers, entities):
        get_context().stack.top.cache_entity(ent_identifiers, entity, situation)

    # Only cache in memcache if we are doing a GET (outside a transaction) or PUT (outside a transaction)
    # the exception is GET_PUT - which we do in our own transaction so we have to ignore that!
    memcacheable = (
        (not in_txn and situation in (CachingSituation.DATASTORE_GET, CachingSituation.DATASTORE_PUT))
        or situation == CachingSituation.DATASTORE_GET_PUT
    )
    if memcacheable and not skip_memcache:
        mc_key_entity_map = {}
        for ent_identifiers, entity in zip(identifiers, entities):
            for identifier in ent_identifiers:
                mc_key_entity_map[identifier] = entity
        _add_entity_to_memcache(model, mc_key_entity_map)
def GetAsync(keys, **kwargs):
    """
        Asynchronously retrieves one or more entities from the data store.

        This function is identical to :func:`server.db.Get`, except that it
        returns an asynchronous object. Call ``get_result()`` on the return
        value to block on the call and get the results.
    """

    class _CachedResult:
        """
            Wraps a value that is already available into something that looks
            like an RPC object (i.e. exposes ``get_result()``).
        """

        def __init__(self, value):
            self.res = value

        def get_result(self):
            return self.res

    caching_active = conf["viur.db.caching"] > 0 and not datastore.IsInTransaction()
    if caching_active and isinstance(keys, (datastore_types.Key, basestring)):
        # Single key: try memcache first
        cached = memcache.get(str(keys), namespace=__CacheKeyPrefix__)
        if cached:
            return _CachedResult(cached)

    # Either the result wasn't cached, or we got a list of keys to fetch;
    # --> no caching possible
    return datastore.GetAsync(keys, **kwargs)
def reset(self, keep_disabled_flags=False):
    """
        Discard the current context stack and start with a clean one.

        :param keep_disabled_flags: when True, leave ``memcache_enabled`` and
            ``context_enabled`` untouched instead of re-enabling them.
        :raises RuntimeError: if called inside a datastore transaction.
    """
    if datastore.IsInTransaction():
        raise RuntimeError(
            "Clearing the context cache inside a transaction breaks everything, "
            "we can't let you do that"
        )

    self.stack = ContextStack()

    if keep_disabled_flags:
        return

    self.memcache_enabled = True
    self.context_enabled = True
def get_from_cache(unique_identifier):
    """
        Return an entity from the context cache, falling back to memcache when possible
    """
    ensure_context()

    if not CACHE_ENABLED:
        return None

    hit = None
    if _context.context_enabled:
        hit = _context.stack.top.get_entity(unique_identifier)

    if hit is not None:
        return hit

    # Memcache is only consulted outside transactions and when enabled
    if datastore.IsInTransaction() or not _context.memcache_enabled:
        return hit

    return _get_entity_from_memcache(unique_identifier)
def get_from_cache_by_key(key):
    """
        Return an entity from the context cache, falling back to memcache when possible
    """
    ensure_context()

    if not CACHE_ENABLED:
        return None

    hit = None
    if _context.context_enabled:
        # Safe: a fresh context was pushed onto the stack when the transaction started
        hit = _context.stack.top.get_entity_by_key(key)

    if hit is not None:
        return hit

    # Memcache is only consulted outside transactions and when enabled
    if datastore.IsInTransaction() or not _context.memcache_enabled:
        return hit

    return _get_entity_from_memcache_by_key(key)
def clear_context_cache():
    """
        Throw away the context cache by installing a brand-new stack.

        Never call this inside a transaction... in fact, probably just don't
        call it at all.
    """
    in_txn = datastore.IsInTransaction()
    if in_txn:
        raise RuntimeError(
            "Clearing the context cache inside a transaction breaks everything, we can't let you do that"
        )
    caching._context.stack = context.ContextStack()
def clear_context_cache():
    """
        Reset the context cache via ``caching.reset_context``.

        Never call this inside a transaction... in fact, probably just don't
        call it at all.
    """
    in_txn = datastore.IsInTransaction()
    if in_txn:
        raise RuntimeError(
            "Clearing the context cache inside a transaction breaks everything, we can't let you do that"
        )
    caching.reset_context(keep_disabled_flags=True)
def GetOrInsert(key, kindName=None, parent=None, **kwargs):
    """
        Either creates a new entity with the given key, or returns the existing one.

        Its guaranteed that there is no race-condition here; it will never overwrite an
        previously created entity. Extra keyword arguments passed to this function will be
        used to populate the entity if it has to be created; otherwise they are ignored.

        :param key: The key which will be fetched or created. \
        If key is a string, it will be used as the name for the new entity, therefore the \
        collectionName is required in this case.
        :type key: server.db.Key | String
        :param kindName: The data kind to use for that entity. Ignored if key is a db.Key.
        :type kindName: str
        :param parent: The parent entity of the entity.
        :type parent: db.Key or None
        :returns: Returns the wanted Entity.
        :rtype: server.db.Entity
    """

    def txn(key, kwargs):
        # Runs inside a transaction so get-then-put cannot race
        try:
            res = datastore.Get(key)
        except datastore_errors.EntityNotFoundError:
            res = Entity(kind=key.kind(), parent=key.parent(), name=key.name(), id=key.id())
            for k, v in kwargs.items():
                res[k] = v
            datastore.Put(res)
        return res

    if not isinstance(key, datastore_types.Key):
        try:
            key = datastore_types.Key(encoded=key)
        except Exception:
            # Was a bare `except:` - narrowed so SystemExit/KeyboardInterrupt
            # are not swallowed. Not an encoded key, so treat it as a name.
            assert kindName, "kindName is required if key is a name rather than an encoded db.Key"
            key = datastore_types.Key.from_path(kindName, key, parent=parent)

    if datastore.IsInTransaction():
        return txn(key, kwargs)

    return datastore.RunInTransaction(txn, key, kwargs)
def execute(self):
    """
        Insert ``self.entities`` into the datastore.

        Two paths:
        - Explicit PKs (and no concrete parents): one transactional
          check-then-Put per entity so an existing key raises IntegrityError
          rather than being overwritten.
        - Otherwise: a single bulk Put, with unique-constraint markers
          acquired/released around it when constraint checks are enabled.

        :returns: the list of datastore keys produced by the Put() calls.
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # We are inserting, but we specified an ID, we need to check for existence before we Put()
        # We do it in a loop so each check/put is transactional - because it's an ancestor query it shouldn't
        # cost any entity groups
        was_in_transaction = datastore.IsInTransaction()
        for key, ent in zip(self.included_keys, self.entities):
            @db.transactional
            def txn():
                # NOTE(review): txn closes over the loop variables `key`/`ent`;
                # it is called once per iteration so late binding is not a problem here
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")

                id_or_name = key.id_or_name()
                if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
                    # Names starting with "__" are reserved by the datastore
                    raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

                if not constraints.constraint_checks_enabled(self.model):
                    # Fast path, just insert
                    results.append(datastore.Put(ent))
                else:
                    markers = constraints.acquire(self.model, ent)
                    try:
                        results.append(datastore.Put(ent))
                        if not was_in_transaction:
                            # We can cache if we weren't in a transaction before this little nested one
                            caching.add_entity_to_cache(self.model, ent, caching.CachingSituation.DATASTORE_GET_PUT)
                    except:
                        # Make sure we delete any created markers before we re-raise
                        constraints.release_markers(markers)
                        raise

            # Make sure we notify app engine that we are using this ID
            # FIXME: Copy ancestor across to the template key
            reserve_id(key.kind(), key.id_or_name())

            txn()

        return results
    else:
        if not constraints.constraint_checks_enabled(self.model):
            # Fast path, just bulk insert
            results = datastore.Put(self.entities)
            for entity in self.entities:
                caching.add_entity_to_cache(self.model, entity, caching.CachingSituation.DATASTORE_PUT)
            return results
        else:
            markers = []
            try:
                #FIXME: We should rearrange this so that each entity is handled individually like above. We'll
                # lose insert performance, but gain consistency on errors which is more important
                markers = constraints.acquire_bulk(self.model, self.entities)
                results = datastore.Put(self.entities)
                for entity in self.entities:
                    caching.add_entity_to_cache(self.model, entity, caching.CachingSituation.DATASTORE_PUT)
            except:
                # Best-effort cleanup of any markers created before the failure
                to_delete = chain(*markers)
                constraints.release_markers(to_delete)
                raise

            # Attach the generated keys and marker state back onto the entities
            for ent, k, m in zip(self.entities, results, markers):
                ent.__key = k
                constraints.update_instance_on_markers(ent, m)

            return results
def Get(keys, **kwargs):
    """
        Retrieve one or more entities from the data store.

        Retrieves the entity or entities with the given key(s) from the data store
        and returns them as fully populated :class:`server.db.Entity` objects.

        If there is an error, the function raises a subclass of :exc:`datastore_errors.Error`.

        If keys is a single key or str, an Entity will be returned,
        or :exc:`EntityNotFoundError` will be raised if no existing entity matches the key.

        However, if keys is a list or tuple, a list of entities will be returned
        that corresponds to the sequence of keys. It will include entities for keys
        that were found and None placeholders for keys that were not found.

        :param keys: Key, str or list of keys or strings to be retrieved.
        :type keys: Key | str | list of Key | list of str
        :param config: Optional configuration to use for this request. This must be specified\
        as a keyword argument.
        :type config: dict

        :returns: Entity or list of Entity objects corresponding to the specified key(s).
        :rtype: :class:`server.db.Entity` | list of :class:`server.db.Entity`
    """
    # Memcache-backed path: only when caching is configured on and we are
    # outside a transaction (transactions must see the datastore directly)
    if conf["viur.db.caching"] > 0 and not datastore.IsInTransaction():
        if isinstance(keys, datastore_types.Key) or isinstance(keys, basestring):  #Just one:
            res = memcache.get(str(keys), namespace=__CacheKeyPrefix__)
            if not res:  #Not cached - fetch and cache it :)
                res = Entity.FromDatastoreEntity(datastore.Get(keys, **kwargs))
                res["key"] = str(res.key())
                memcache.set(str(res.key()), res, time=__cacheTime__, namespace=__CacheKeyPrefix__)
            return (res)
        #Either the result wasnt found, or we got a list of keys to fetch;
        elif isinstance(keys, list):
            #Check Memcache first
            cacheRes = {}
            tmpRes = []
            keyList = [str(x) for x in keys]
            while keyList:  #Fetch in Batches of 30 entries, as the max size for bulk_get is limited to 32MB
                currentBatch = keyList[:__MemCacheBatchSize__]
                keyList = keyList[__MemCacheBatchSize__:]
                cacheRes.update(memcache.get_multi(currentBatch, namespace=__CacheKeyPrefix__))
            #Fetch the rest from DB
            missigKeys = [x for x in keys if not str(x) in cacheRes]
            dbRes = [
                Entity.FromDatastoreEntity(x) for x in datastore.Get(missigKeys)
                if x is not None
            ]
            # Cache what we had fetched
            saveIdx = 0
            while len(dbRes) > saveIdx * __MemCacheBatchSize__:
                cacheMap = {
                    str(obj.key()): obj
                    for obj in dbRes[saveIdx * __MemCacheBatchSize__:(saveIdx + 1) * __MemCacheBatchSize__]
                }
                try:
                    memcache.set_multi(cacheMap, time=__cacheTime__, namespace=__CacheKeyPrefix__)
                except:
                    # Deliberate best-effort: a failed memcache write must not
                    # fail the read
                    pass
                saveIdx += 1
            # Re-assemble results in the order the keys were requested,
            # mixing cache hits with freshly fetched entities
            for key in [str(x) for x in keys]:
                if key in cacheRes:
                    tmpRes.append(cacheRes[key])
                else:
                    for e in dbRes:
                        if str(e.key()) == key:
                            tmpRes.append(e)
                            break
            if conf["viur.debug.traceQueries"]:
                logging.debug(
                    "Fetched a result-set from Datastore: %s total, %s from cache, %s from datastore"
                    % (len(tmpRes), len(cacheRes.keys()), len(dbRes)))
            return (tmpRes)
    # Uncached path: straight datastore fetch
    if isinstance(keys, list):
        return ([
            Entity.FromDatastoreEntity(x) for x in datastore.Get(keys, **kwargs)
        ])
    else:
        return (Entity.FromDatastoreEntity(datastore.Get(keys, **kwargs)))