def delete(models, **kwargs):
    """Delete one or more Model instances and invalidate their cache entries.

    Note: db.get returns None when the key doesn't exist, so we can
    replace every deleted key in memcache by None (rather than evicting),
    which makes the deletion visible to concurrent cached reads.

    Transient datastore errors are retried up to DATASTORE_NB_RETRY
    times; the last error is re-raised if every attempt fails (the
    previous implementation fell off the loop and silently returned
    None in that case).

    TODO(sahid): Needs a better doc.
    """
    memclient = memcache.Client()
    for retry in xrange(DATASTORE_NB_RETRY):
        try:
            keys, multiple = datastore.NormalizeAndTypeCheckKeys(models)
            if not any(keys):
                # BUG FIX: `multiple and [] or None` always yielded None
                # because [] is falsy; multi-key callers expect [].
                return [] if multiple else None  # Nothing to do.
            batch = db.delete_async(models, **kwargs)
            # Overwrite (not evict) cached entries so a concurrent get
            # sees None, matching db.get semantics for a missing key.
            mapping = dict((unicode(k), None) for k in keys)
            memclient.replace_multi(mapping, time=MEMCACHE_TIME,
                                    key_prefix=MEMCACHE_PREFIX)
            return batch.get_result()
        except (db.Timeout,
                db.TransactionFailedError,
                apiproxy_errors.ApplicationError,
                apiproxy_errors.DeadlineExceededError) as e:
            if retry == DATASTORE_NB_RETRY - 1:
                # Out of retries: propagate instead of failing silently.
                raise
            logging.warn("Error during the delete process, "
                         "retry %d in %.2fs", retry, DATASTORE_TIME_RETRY)
            logging.debug(e.message)
            time.sleep(DATASTORE_TIME_RETRY)
def get(keys, **kwargs):
    """Fetch the Model instance(s) with the given key(s), memcache first.

    Keys found in memcache are unserialized from the cached payload; the
    remainder are fetched from the datastore and then added to memcache.
    Results are returned in the same order as the input keys, with None
    for missing entities — matching db.get semantics.  (The previous
    implementation appended cache hits before datastore results, so the
    multi-key order was arbitrary; its retry loop caught no exceptions
    and always returned on the first pass, so it is removed.)

    TODO(sahid): Needs a better doc.
    """
    memclient = memcache.Client()
    keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    if not any(keys):
        # BUG FIX: `multiple and [] or None` always yielded None
        # because [] is falsy; multi-key callers expect [].
        return [] if multiple else None  # Nothing to do.
    ordered = [unicode(k) for k in keys]
    debug("Needs to retrieved keys=%s" % ordered)
    # First check for keys already in memcache.  On an unmutated dict,
    # keys() and values() iterate in matching order, so the zip pairs
    # each cache key with its own unserialized model.
    in_mem = memclient.get_multi(ordered, key_prefix=MEMCACHE_PREFIX)
    debug('dict=%s' % in_mem)
    models = dict(zip(in_mem.keys(), unserialize(in_mem.values())))
    debug("in_mem=%s" % in_mem.keys())
    # Fetch cache misses from the datastore, preserving input order and
    # skipping duplicate keys.
    seen = set(in_mem)
    missing = []
    for key in ordered:
        if key not in seen:
            seen.add(key)
            missing.append(key)
    if missing:
        in_db = db.get(missing, **kwargs)
        models.update(zip(missing, in_db))
        debug("in_db=%s, k=%s" % (in_db, missing))
        # Cache what we just read; add_multi leaves entries written by
        # a concurrent request untouched.
        memclient.add_multi(dict(zip(missing, serialize(in_db))),
                            time=MEMCACHE_TIME,
                            key_prefix=MEMCACHE_PREFIX)
    ret = [models.get(key) for key in ordered]
    if multiple:
        return ret
    return ret[0]
def get_entities(keys):
    """Fetch raw datastore entities for *keys*.

    Returns whatever datastore.Get yields, or None when a single
    requested entity does not exist.
    """
    rpc = datastore.GetRpcFromKwargs({})
    normalized, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    try:
        return datastore.Get(normalized, rpc=rpc)
    except datastore_errors.EntityNotFoundError:
        # Only a single-key lookup raises this; multi-key lookups report
        # missing entities inline instead, so `multiple` must be False.
        assert not multiple
        return None
def get(keys, **kwargs):
    """Fetch model(s) by key, serving repeated keys from __db_get_cache.

    Only keys absent from the cache hit the datastore; fetched models
    are stored back so later calls within this cache's lifetime are
    answered locally.  Mirrors db.get: a list for multiple keys, a
    single instance (or None) otherwise.
    """
    keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    uncached = [key for key in keys if key not in __db_get_cache]
    fetched = db.get(uncached, **kwargs)
    # Single lookup that missed both cache and datastore: nothing to
    # record, answer None right away.
    if not multiple and len(fetched) == 1 and fetched[0] is None:
        return
    __db_get_cache.update(
        (model.key(), model) for model in fetched if model is not None)
    results = [__db_get_cache.get(key, None) for key in keys]
    if multiple:
        return results
    if results:
        return results[0]
def get(cls, keys):
    """Fetch instances of *cls* from its SQL backing table by key.

    Builds one SELECT over the table named after the class, keyed by
    key_id and namespace, and materializes each row back into a *cls*
    instance.  Returns a list aligned with *keys*, with None for rows
    not found.  Raises NotImplementedError when the SQL fails.

    NOTE(review): all keys are assumed to share keys[0]'s namespace —
    confirm with callers.  Also note this returns a list even for a
    single key, unlike the other get() helpers in this module.
    """
    keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    if not keys:
        return [] if multiple else None
    # Column order in the SELECT: key_id, namespace, then properties
    # sorted by name.  The per-row decoding below depends on it.
    columns = ['key_id', 'namespace']
    columns.extend(sorted(cls._properties.keys()))
    sql = "SELECT {columns} FROM {table_name} WHERE key_id IN ({key_ids}) AND namespace = %s".format(
        table_name=cls.__name__,
        key_ids=', '.join(['%s'] * len(keys)),
        columns=', '.join(columns),
    )
    args = tuple([key.id() for key in keys] + [keys[0].namespace()])
    with cursor() as db_cursor:
        try:
            db_cursor.execute("BEGIN")
            db_cursor.execute(sql, args)
            results = db_cursor.fetchall()
            db_cursor.execute("COMMIT")
        except Exception as e:
            _logger.error("Failed to execute SQL: {}".format(sql))
            _logger.exception(e.message)
            db_cursor.execute("ROLLBACK")
            raise NotImplementedError("{} / {}".format(sql, args))
    rows = []
    for result in results:
        key_id = result[0]
        namespace = result[1]
        key = db.Key.from_path(cls.kind(), key_id, namespace=namespace)
        row = cls(key=key)
        # BUG FIX: property values start at result[2] (after key_id and
        # namespace).  The original read result[index + 1], assigning
        # the namespace column to the first property and shifting every
        # subsequent property value by one.
        for offset, column in enumerate(columns[2:]):
            setattr(row, column, result[offset + 2])
        rows.append(row)
    # Re-align results with the requested key order; missing keys map
    # to None, matching db.get semantics.
    grouped_rows = {str(row.key()): row for row in rows}
    return [grouped_rows.get(key_string, None)
            for key_string in map(str, keys)]
def get_lazy(cls, keys):
    """Fetch model(s) through datastore_lazy.

    Returns a list when called with a sequence of keys, otherwise the
    single fetched result.
    """
    normalized, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    fetched = datastore_lazy.get(normalized)
    if not multiple:
        return fetched[0]
    return fetched
def get_async(cls, keys):
    """Fetch model(s) via an asynchronous datastore Get.

    NOTE: despite the name, this blocks on get_result() before
    returning; callers never see the in-flight RPC.
    """
    normalized, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
    pending = datastore.GetAsync(normalized)
    fetched = pending.get_result()
    if not multiple:
        return fetched[0]
    return fetched
def remove(keys):
    """Evict the given key(s) from __db_get_cache.

    Returns the evicted model instances; keys not present in the cache
    are skipped silently.
    """
    normalized, _ = datastore.NormalizeAndTypeCheckKeys(keys)
    evicted = []
    for key in normalized:
        if key in __db_get_cache:
            evicted.append(__db_get_cache.pop(key))
    return evicted